Merge branch 'audio-features' of https://github.com/chrisshyi/spotify-lib-vis into database
Kevin Mok
7 years ago
8 changed files with 182 additions and 38 deletions
  5   spotifyvis/admin.py
  9   spotifyvis/models.py
 28   spotifyvis/static/spotifyvis/scripts/user_data.js
141   spotifyvis/templates/spotifyvis/audio_features.html
 12   spotifyvis/templates/spotifyvis/logged_in.html
  1   spotifyvis/urls.py
  2   spotifyvis/utils.py
 22   spotifyvis/views.py
spotifyvis/admin.py
@@ -1,3 +1,8 @@
from django.contrib import admin
from .models import Track, Artist, AudioFeatures, User

# Register your models here.
admin.site.register(Track)
admin.site.register(Artist)
admin.site.register(AudioFeatures)
admin.site.register(User)
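
Note: the Track, Artist, AudioFeatures, and User models registered here live in spotifyvis/models.py, which this commit also edits (9 changed lines) but whose hunk is not reproduced on this page. For orientation only, a minimal sketch of what those models might declare; the model names come from the import above, while every field and relation below is an assumption, not the actual schema:

# Hypothetical sketch of spotifyvis/models.py.
# Only the model names come from the admin.py import above; every field
# and relation is assumed for illustration.
from django.db import models

class Artist(models.Model):
    name = models.CharField(max_length=200)                         # assumed field

class User(models.Model):
    user_id = models.CharField(max_length=64, primary_key=True)     # assumed field
    user_secret = models.CharField(max_length=64)                    # assumed field

class Track(models.Model):
    track_id = models.CharField(max_length=64, primary_key=True)    # assumed field
    name = models.CharField(max_length=200)                          # assumed field
    artist = models.ForeignKey(Artist, on_delete=models.CASCADE)     # assumed relation
    users = models.ManyToManyField(User)                             # assumed relation

class AudioFeatures(models.Model):
    track = models.OneToOneField(Track, on_delete=models.CASCADE)    # assumed relation
    danceability = models.FloatField(null=True)                      # assumed field
    energy = models.FloatField(null=True)                            # assumed field
    valence = models.FloatField(null=True)                           # assumed field
    tempo = models.FloatField(null=True)                             # assumed field
    instrumentalness = models.FloatField(null=True)                  # assumed field
    acousticness = models.FloatField(null=True)                      # assumed field
    loudness = models.FloatField(null=True)                          # assumed field
    speechiness = models.FloatField(null=True)                       # assumed field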
spotifyvis/static/spotifyvis/scripts/user_data.js
@@ -1,28 +0,0 @@
/**
 * Retrieves data for a specific audio feature for a certain user
 * @param audioFeature: the audio feature for which data will be retrieved
 * @param userSecret: the user secret, needed to authorize the request
 */
function getAudioFeatureData(audioFeature, userSecret) {
    let httpRequest = new XMLHttpRequest();
    /*
     * Handler for the response
     */
    httpRequest.onreadystatechange = function() {
        if (httpRequest.readyState === XMLHttpRequest.DONE) {
            if (httpRequest.status === 200) {
                let responseData = JSON.parse(httpRequest.responseText);
                // TODO: The data points need to be plotted instead
                for (let data of responseData.data_points) {
                    console.log(data);
                }
            } else {
                alert("There was a problem with the request, please try again!");
            }
        }
    };

    let queryString = `/audio_features/${audioFeature}/${userSecret}`;
    httpRequest.open('GET', queryString, true);
    httpRequest.send();
}
spotifyvis/templates/spotifyvis/audio_features.html
@@ -0,0 +1,141 @@
{% load static %}
<!DOCTYPE html>
<!--[if lt IE 7]> <html class="no-js lt-ie9 lt-ie8 lt-ie7"> <![endif]-->
<!--[if IE 7]> <html class="no-js lt-ie9 lt-ie8"> <![endif]-->
<!--[if IE 8]> <html class="no-js lt-ie9"> <![endif]-->
<!--[if gt IE 8]><!--> <html class="no-js"> <!--<![endif]-->
<head>
    <meta charset="utf-8">
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <title>User Spotify Data</title>
    <meta name="description" content="">
    <meta name="viewport" content="width=device-width, initial-scale=1">
    <style>
        .tick {
            font-size: 15px;
        }
    </style>
</head>
<body>
    <!--[if lt IE 7]>
        <p class="browsehappy">You are using an <strong>outdated</strong> browser. Please <a href="#">upgrade your browser</a> to improve your experience.</p>
    <![endif]-->
    <p>Logged in as {{ user_id }}</p>
    <script src="https://d3js.org/d3.v5.js"></script>
    <script type="text/javascript">

        /**
         * Queries the backend for audio feature data, draws a bar chart of
         * the value frequencies, and appends the chart to a designated
         * parent element.
         *
         * @param audioFeature: the name of the audio feature (string)
         * @param intervalEndPoints: a sorted array of 5 numbers defining the value intervals (categories),
         *                           for example [0, 0.25, 0.5, 0.75, 1.0] for instrumentalness defines the
         *                           ranges (0-0.25), (0.25-0.5), (0.5-0.75), (0.75-1.0)
         * @param parentElem: the DOM element to append the graph to (a selector string)
         * @return None
         */
        function drawAudioFeatGraph(audioFeature, intervalEndPoints, parentElem) {
            let margin = {top: 20, right: 30, bottom: 30, left: 40};
            let width = 480 - margin.left - margin.right,
                height = 270 - margin.top - margin.bottom;

            let featureData = {};
            // Create the keys first, in interval order
            for (let index = 0; index < intervalEndPoints.length - 1; index++) {
                let key = `${intervalEndPoints[index]} ~ ${intervalEndPoints[index + 1]}`;
                featureData[key] = 0;
            }
            // define the vertical scaling function
            let vScale = d3.scaleLinear().range([height, 0]);

            d3.json(`/audio_features/${audioFeature}/{{ user_secret }}`)
                .then(function(response) {
                    // categorize the data points
                    for (let dataPoint of response.data_points) {
                        dataPoint = parseFloat(dataPoint);
                        let index = intervalEndPoints.length - 2;
                        // walk down to the lower endpoint of the interval containing dataPoint
                        while (dataPoint < intervalEndPoints[index]) {
                            index -= 1;
                        }
                        let key = `${intervalEndPoints[index]} ~ ${intervalEndPoints[index + 1]}`;
                        featureData[key] += 1;
                    }

                    let dataSet = Object.values(featureData);
                    let dataRanges = Object.keys(featureData); // ranges of audio feature values, e.g. "0 ~ 0.25", "0.25 ~ 0.5"
                    let dataArr = [];
                    // turn the counts into an array of objects, e.g. {range: "0 ~ 0.25", counts: 5}
                    for (let i = 0; i < dataRanges.length; i++) {
                        dataArr.push({
                            range: dataRanges[i],
                            counts: featureData[dataRanges[i]]
                        });
                    }
                    vScale.domain([0, d3.max(dataSet)]).nice();

                    let hScale = d3.scaleBand().domain(dataRanges).rangeRound([0, width]).padding(0.5);

                    let xAxis = d3.axisBottom().scale(hScale);
                    let yAxis = d3.axisLeft().scale(vScale);

                    let featureSVG = d3.select(parentElem)
                        .append('svg').attr('width', width + margin.left + margin.right)
                        .attr('height', height + margin.top + margin.bottom);

                    let featureGraph = featureSVG.append("g")
                        .attr("transform", `translate(${margin.left}, ${margin.top})`)
                        .attr("fill", "teal");

                    featureGraph.selectAll(".bar")
                        .data(dataArr)
                        .enter().append('rect')
                        .attr('class', 'bar')
                        .attr('x', function(d) { return hScale(d.range); })
                        .attr('y', function(d) { return vScale(d.counts); })
                        .attr("height", function(d) { return height - vScale(d.counts); })
                        .attr("width", hScale.bandwidth());

                    featureGraph.append('g')
                        .attr('class', 'axis')
                        .attr('transform', `translate(0, ${height})`)
                        .call(xAxis);

                    featureGraph.append('g')
                        .attr('class', 'axis')
                        .call(yAxis);

                    featureSVG.append("text")
                        .attr('x', (width / 2))
                        .attr('y', (margin.top / 2))
                        .attr('text-anchor', 'middle')
                        .style('font-size', '14px')
                        .text(`${capFeatureStr(audioFeature)}`);

                });
        }

        /**
         * Returns the audio feature name string with the first letter capitalized
         * @param audioFeature: the name of the audio feature
         * @returns the audio feature name string with the first letter capitalized
         */
        function capFeatureStr(audioFeature) {
            return audioFeature.charAt(0).toUpperCase() + audioFeature.slice(1);
        }

        drawAudioFeatGraph("instrumentalness", [0, 0.25, 0.5, 0.75, 1.0], 'body');
        drawAudioFeatGraph("valence", [0, 0.25, 0.5, 0.75, 1.0], 'body');
        drawAudioFeatGraph("energy", [0, 0.25, 0.5, 0.75, 1.0], 'body');
        drawAudioFeatGraph("tempo", [40, 80, 120, 160, 200], 'body');
        drawAudioFeatGraph("danceability", [0, 0.25, 0.5, 0.75, 1.0], 'body');
        drawAudioFeatGraph("acousticness", [0, 0.25, 0.5, 0.75, 1.0], 'body');
        drawAudioFeatGraph("loudness", [-60, -45, -30, -15, 0], 'body');
        drawAudioFeatGraph("speechiness", [0, 0.25, 0.5, 0.75, 1.0], 'body');
    </script>
</body>
</html>
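
The script above expects GET /audio_features/<feature>/<user_secret> to return JSON containing a data_points array of numeric (or numeric-string) values; the matching changes to spotifyvis/urls.py (1 line) and spotifyvis/views.py (22 lines) are not reproduced on this page. A minimal sketch of what such an endpoint could look like, where the view name, URL registration, and ORM lookup are assumptions and only the URL shape and the data_points key come from the template:

# Hypothetical sketch of the JSON endpoint queried by audio_features.html.
# Only the URL shape /audio_features/<audio_feature>/<user_secret> and the
# data_points key appear in the diff; names and lookups below are assumed.
from django.http import JsonResponse

from .models import AudioFeatures, User

def get_audio_feature_data(request, audio_feature, user_secret):
    user = User.objects.get(user_secret=user_secret)            # assumed lookup field
    features = AudioFeatures.objects.filter(track__users=user)  # assumed relation
    points = [getattr(feat, audio_feature) for feat in features]
    return JsonResponse({'data_points': points})

# Possible urls.py entry (assumed naming):
# path('audio_features/<str:audio_feature>/<str:user_secret>',
#      views.get_audio_feature_data, name='get_audio_feature_data'),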
spotifyvis/templates/spotifyvis/logged_in.html
@@ -0,0 +1,12 @@
<!DOCTYPE html>
{% load static %}
<html lang="en">
<head>
    <meta charset="UTF-8">
    <title>Logged In</title>
    <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/4.0.0/css/bootstrap.min.css" integrity="sha384-Gn5384xqQ1aoWXA+058RXPxPg6fy4IWvTNh0E263XmFcJlSAwiGgFAW/dAiS6JXm" crossorigin="anonymous">
</head>
<body>
    <a class="btn btn-primary" href="/audio_features/{{ user_secret }}" role="button">Audio Features</a>
</body>
</html>
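
The Audio Features button links to /audio_features/{{ user_secret }}, which presumably renders the audio_features.html template added above (that template references both user_id and user_secret). A minimal sketch of such a page view, assuming a render-based implementation and a lookup field that this commit does not show:

# Hypothetical sketch of the page view behind the "Audio Features" button.
# Only the URL shape and the user_id / user_secret template variables come
# from the diff; the view name and lookup field are assumptions.
from django.shortcuts import render

from .models import User

def audio_features(request, user_secret):
    user = User.objects.get(user_secret=user_secret)  # assumed lookup field
    context = {
        'user_id': user.user_id,                      # assumed attribute
        'user_secret': user_secret,
    }
    return render(request, 'spotifyvis/audio_features.html', context)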