Merge branch 'chris/wip' into vis-page

2018-11-17 21:34:14 -05:00
10 changed files with 249 additions and 113 deletions

View File

@@ -3,26 +3,35 @@
* a designated parent element
*
* @param audioFeature: the name of the audio feature (string)
* @param intervalEndPoints: a sorted array of 5 real numbers defining the intervals (categories) of values,
* @param intervalEndPoints: an object defining the intervals (categories) of values,
* for example:
* [0, 0.25, 0.5, 0.75, 1.0] for instrumentalness would define ranges
* (0-0.25), (0.25-0.5), (0.5-0.75), (0.75-1.0)
* @param parentElem: the DOM element to append the graph to (a selector string)
* {begin: 0, end: 1.0, step: 0.25} for instrumentalness would define ranges
* [0-0.25), [0.25-0.5), [0.5-0.75), [0.75-1.0]
* @param colId: the id of the column DOM element to append the graph to (without the '#' prefix)
* @param userSecret: the user secret string for identification
* @return None
*/
function drawAudioFeatGraph(audioFeature, intervalEndPoints, parentElem, userSecret) {
function drawAudioFeatGraph(audioFeature, intervalEndPoints, colId, userSecret) {
// TODO: don't hard-code the dimensions?
let margin = {top: 20, right: 30, bottom: 30, left: 40};
let width = 480 - margin.left - margin.right,
height = 270 - margin.top - margin.bottom;
let featureData = {};
let currentEndPoint = intervalEndPoints.begin; // start at beginning
// Create the keys first in order
for (let index = 0; index < intervalEndPoints.length - 1; index++) {
let key = `${intervalEndPoints[index]} ~ ${intervalEndPoints[index + 1]}`;
while (currentEndPoint < intervalEndPoints.end) {
let startOfRange = currentEndPoint;
let endOfRange = precise(startOfRange + intervalEndPoints.step);
let key = `${startOfRange} ~ ${endOfRange}`;
featureData[key] = 0;
currentEndPoint = endOfRange;
}
// for (let index = 0; index < intervalEndPoints.length - 1; index++) {
// let key = `${intervalEndPoints[index]} ~ ${intervalEndPoints[index + 1]}`;
// featureData[key] = 0;
// }
// define the vertical scaling function
// let vScale = d3.scaleLinear().range([height, 0]);
let padding = 0.5;
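As a quick sanity check of the key generation above, a minimal sketch (illustrative only, using the {begin: 0, end: 1.0, step: 0.25} example from the doc comment and the precise() helper added at the bottom of this file):
// illustrative sketch — not part of the graph code
let demoEndPoints = {begin: 0, end: 1.0, step: 0.25};
let demoData = {};
let cursor = demoEndPoints.begin;
while (cursor < demoEndPoints.end) {
    let next = precise(cursor + demoEndPoints.step);
    demoData[`${cursor} ~ ${next}`] = 0; // same key format as featureData
    cursor = next;
}
// demoData: {"0 ~ 0.25": 0, "0.25 ~ 0.5": 0, "0.5 ~ 0.75": 0, "0.75 ~ 1": 0}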
@@ -33,12 +42,15 @@ function drawAudioFeatGraph(audioFeature, intervalEndPoints, parentElem, userSec
// categorize the data points
for (let dataPoint of response.data_points) {
dataPoint = parseFloat(dataPoint);
let index = intervalEndPoints.length - 2;
let currLowerBound = precise(intervalEndPoints.end - intervalEndPoints.step);
let stepSize = intervalEndPoints.step;
// find the lower bound of the interval containing dataPoint
while (dataPoint < intervalEndPoints[index]) {
index -= 1;
while (dataPoint < currLowerBound && currLowerBound >= intervalEndPoints.begin) {
currLowerBound = precise(currLowerBound - stepSize);
}
let key = `${intervalEndPoints[index]} ~ ${intervalEndPoints[index + 1]}`;
let upperBound = precise(currLowerBound + stepSize);
currLowerBound = precise(currLowerBound);
let key = `${currLowerBound} ~ ${upperBound}`;
featureData[key] += 1;
}
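To see how the new lower-bound search buckets a value, a short trace (illustrative; same {begin: 0, end: 1.0, step: 0.25} example):
// illustrative trace for dataPoint = 0.37
// currLowerBound starts at precise(1.0 - 0.25) = 0.75
// 0.37 < 0.75  -> currLowerBound = precise(0.75 - 0.25) = 0.5
// 0.37 < 0.5   -> currLowerBound = precise(0.5 - 0.25) = 0.25
// 0.37 >= 0.25 -> loop exits
// upperBound = precise(0.25 + 0.25) = 0.5, so featureData["0.25 ~ 0.5"] += 1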
@@ -63,7 +75,7 @@ function drawAudioFeatGraph(audioFeature, intervalEndPoints, parentElem, userSec
let xAxis = d3.axisBottom().scale(hScale);
let yAxis = d3.axisLeft().scale(vScale);
let featureSVG = d3.select('#' + parentElem)
let featureSVG = d3.select('#' + colId)
.append('svg').attr('width', width + margin.left + margin.right)
.attr('height', height + margin.top + margin.bottom);
@@ -109,3 +121,12 @@ function drawAudioFeatGraph(audioFeature, intervalEndPoints, parentElem, userSec
function capFeatureStr(audioFeature) {
return audioFeature.charAt(0).toUpperCase() + audioFeature.slice(1);
}
/**
* Rounds a number to two significant figures
* @param number: the number to be rounded
* @returns the input rounded to two significant figures
*/
function precise(number) {
return Number.parseFloat(number.toPrecision(2));
}
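The rounding exists because repeatedly adding a binary-inexact step accumulates floating-point error in the interval endpoints; for example (plain JavaScript behaviour, with the 0.20 step used on the visualization page):
// without rounding, the endpoints drift:
// 0.2 + 0.2 + 0.2      -> 0.6000000000000001
// which would produce keys like "0.4 ~ 0.6000000000000001"
// with the helper:
// precise(0.4 + 0.2)   -> 0.6, giving the intended key "0.4 ~ 0.6"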

View File

@@ -4,7 +4,7 @@ function create_genre_graph(data) {
data.forEach(function(d) {
d.num_songs = +d.num_songs;
console.log(d.genre, d.num_songs);
var artist_names = Object.keys(d.artists);
let artist_names = Object.keys(d.artists);
artist_names.forEach(function(e) {
d.artists[e] = +d.artists[e];
console.log(e, d.artists[e]);
@@ -22,35 +22,34 @@ function create_genre_graph(data) {
x.domain(data.map(function(d) {
return d.genre;
}));
//y.domain([0, d3.max(data, function(d) { return d.num_songs; }) * 1.25]).nice();
// y.domain([0, d3.max(data, function(d) { return d.num_songs; }) * 1.25]).nice();
y.domain([0, d3.max(data, function(d) {
return d.num_songs;
return d.num_songs; // d3.max takes the largest song count across genres
})]).nice();
// }}} domains //
// setup bar colors {{{ //
var max_artists = d3.max(data, function(d) {
let max_artists = d3.max(data, function(d) {
return Object.keys(d.artists).length;
});
var z = d3.scaleOrdinal().range(randomColor({
let colorScale = d3.scaleOrdinal().range(randomColor({
count: max_artists,
luminosity: 'light',
}));
// }}} setup bar colors //
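For context: randomColor({count, luminosity}) from the randomColor library returns an array of that many hex color strings, so the ordinal scale simply maps an artist's index onto a fixed light palette. A minimal sketch (illustrative values only):
// illustrative only — actual colors are random
let palette = randomColor({count: 3, luminosity: 'light'}); // e.g. ['#b3f2c7', '#f2d1a4', '#a4c7f2']
let demoScale = d3.scaleOrdinal().range(palette);
demoScale(0); // first palette color
demoScale(1); // second palette color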
for (var genre_dict of data) {
for (let genre_dict of data) {
// process artist breakdown {{{ //
var keys = Object.keys(genre_dict.artists);
var stack = d3.stack()
//.order(d3.stackOrderAscending)
let keys = Object.keys(genre_dict.artists);
let stack = d3.stack()
.order(d3.stackOrderDescending)
.keys(keys)([genre_dict.artists])
//unpack the column
// unpack the column
.map((d, i) => {
return {
key: keys[i],
@@ -72,8 +71,9 @@ function create_genre_graph(data) {
})
.attr("height", d => y(d.data[0]) - y(d.data[1]))
.attr("width", x.bandwidth())
.attr('fill', (d, i) => z(i))
.append('title').text(d => d.key + ': ' + (d.data[1] - d.data[0]));
.attr('fill', (d, i) => colorScale(i))
// keep 3 significant figures in the song count label
.append('title').text(d => d.key + ': ' + (d.data[1] - d.data[0]).toPrecision(3));
// }}} add bars //
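The tooltip rounds the stacked segment height to three significant figures because the stacked y-values are floats; a small illustration with made-up numbers:
// illustrative only: d.data holds the [lower, upper] stack values for a segment
let demoSegment = {key: 'Some Artist', data: [12, 19.000000000000004]};
let label = demoSegment.key + ': ' + (demoSegment.data[1] - demoSegment.data[0]).toPrecision(3);
// label -> 'Some Artist: 7.00'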
@@ -108,11 +108,11 @@ function create_genre_graph(data) {
}
// wrap text {{{ //
// wrapping long labels
// https://gist.github.com/guypursey/f47d8cd11a8ff24854305505dbbd8c07#file-index-html
function wrap(text, width) {
text.each(function() {
var text = d3.select(this),
let text = d3.select(this),
words = text.text().split(/\s+/).reverse(),
word,
line = [],
@@ -122,13 +122,13 @@ function wrap(text, width) {
dy = parseFloat(text.attr("dy")),
tspan = text.text(null).append("tspan").attr("x", 0).attr("y", y).attr("dy", dy + "em")
while (word = words.pop()) {
line.push(word)
tspan.text(line.join(" "))
line.push(word);
tspan.text(line.join(" "));
if (tspan.node().getComputedTextLength() > width) {
line.pop()
tspan.text(line.join(" "))
line = [word]
tspan = text.append("tspan").attr("x", 0).attr("y", y).attr("dy", `${++lineNumber * lineHeight + dy}em`).text(word)
line.pop();
tspan.text(line.join(" "));
line = [word];
tspan = text.append("tspan").attr("x", 0).attr("y", y).attr("dy", `${++lineNumber * lineHeight + dy}em`).text(word);
}
}
})
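The wrap helper is intended to be .call()ed on the tick-label selection; a hedged usage sketch in the style of the linked gist (the selector and width here are assumptions, not taken from this diff):
// illustrative usage only — the axis group's class name is an assumption
svg.select('.x-axis').selectAll('.tick text')
    .call(wrap, x.bandwidth());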

View File

@@ -43,22 +43,38 @@
<script src="{% static "graphs/scripts/audio_feat_graph.js" %}"></script>
<script type="text/javascript">
let userSecret = "{{ user_secret }}";
drawAudioFeatGraph("acousticness", [0, 0.25, 0.5, 0.75, 1.0],
'acoustic-column', userSecret);
drawAudioFeatGraph("danceability", [0, 0.25, 0.5, 0.75, 1.0],
'dance-column', userSecret);
drawAudioFeatGraph("energy", [0, 0.25, 0.5, 0.75, 1.0],
'energy-column', userSecret);
drawAudioFeatGraph("instrumentalness", [0, 0.25, 0.5, 0.75, 1.0],
'instr-column', userSecret);
drawAudioFeatGraph("loudness", [-60, -45, -30, -15, 0],
'loud-column', userSecret);
drawAudioFeatGraph("speechiness", [0, 0.25, 0.5, 0.75, 1.0],
'speech-column', userSecret);
drawAudioFeatGraph("tempo", [0, 40, 80, 120, 160, 200],
'tempo-column', userSecret);
drawAudioFeatGraph("valence", [0, 0.25, 0.5, 0.75, 1.0],
'valence-column', userSecret);
let graphParams = {
"acousticness": {
intervalEndPoints: {begin: 0, end: 1.0, step: 0.20},
colId: 'acoustic-column'},
"danceability": {
intervalEndPoints: {begin: 0, end: 1.0, step: 0.20},
colId: 'dance-column'},
"energy": {
intervalEndPoints: {begin: 0, end: 1.0, step: 0.20},
colId: 'energy-column'},
"instrumentalness": {
intervalEndPoints: {begin: 0, end: 1.0, step: 0.20},
colId: 'instr-column'},
"loudness": {
intervalEndPoints: {begin: -60, end: 0, step: 12},
colId: 'loud-column'},
"speechiness": {
intervalEndPoints: {begin: 0, end: 1.0, step: 0.20},
colId: 'speech-column'},
"tempo": {
intervalEndPoints: {begin: 0, end: 200, step: 40},
colId: 'tempo-column'},
"valence": {
intervalEndPoints: {begin: 0, end: 1.0, step: 0.20},
colId: 'valence-column'},
};
for (let featureKey in graphParams) {
let params = graphParams[featureKey];
drawAudioFeatGraph(featureKey, params.intervalEndPoints,
params.colId, userSecret);
}
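The loop issues the same calls as the eight explicit drawAudioFeatGraph invocations it replaces; for example, the first iteration is equivalent to:
// first iteration, written out (illustrative)
drawAudioFeatGraph("acousticness", {begin: 0, end: 1.0, step: 0.20},
                   'acoustic-column', userSecret);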
</script>
</body>
</html>

View File

@@ -13,7 +13,7 @@
<title>Test DB Page</title>
<meta name="description" content="">
<meta name="viewport" content="width=device-width, initial-scale=1">
<link rel="stylesheet" href="{% static 'css/dark_bg.css' %}">
{# <link rel="stylesheet" href="{% static 'css/dark_bg.css' %}">#}
</head>
<!-- }}} header -->
@@ -34,16 +34,17 @@
preserveAspectRatio="xMinYMid">
</svg>
<script>
var svg = d3.select("svg"),
let svg = d3.select("svg"),
margin = {top: 20, right: 20, bottom: 30, left: 40},
width = +svg.attr("width") - margin.left - margin.right,
height = +svg.attr("height") - margin.top - margin.bottom,
g = svg.append("g").attr("transform", "translate(" + margin.left + "," + margin.top + ")");
var x = d3.scaleBand()
let x = d3.scaleBand()
.rangeRound([0, width])
.paddingInner(0.05)
.paddingInner(0.1)
.paddingOuter(0.7)
.align(0.1);
var y = d3.scaleLinear()
let y = d3.scaleLinear()
.rangeRound([height, 0]);
d3.json("{% url "api:get_genre_data" user_secret %}").then(create_genre_graph);
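For a rough sense of the new band sizing: paddingInner reserves that fraction of each step as the gap between bars, and paddingOuter reserves that many steps of space at each end of the range. A sketch under assumed numbers (660px inner width and five genres; neither is taken from this diff):
// illustrative only
let demoX = d3.scaleBand()
    .rangeRound([0, 660])               // assumed inner width
    .paddingInner(0.1)
    .paddingOuter(0.7)
    .domain(['a', 'b', 'c', 'd', 'e']); // assumed five genres
// step      = 660 / (5 - 0.1 + 2 * 0.7) ≈ 104.8
// bandwidth = step * (1 - 0.1)          ≈ 94px per bar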