
Merge pull request #46 from chrisshyi/artists

Finish setting up artist bubble chart

Branch: master
Author: Kevin Mok, committed via GitHub, 7 years ago
Commit: 4811e1b623
Changed files (9):
  1. reset_db.sh (14 lines changed)
  2. reset_db.sql (3 lines changed)
  3. spotifyvis/static/spotifyvis/scripts/artist_graph.js (93 lines changed)
  4. spotifyvis/static/spotifyvis/scripts/audio_feat_graph.js (105 lines changed)
  5. spotifyvis/templates/spotifyvis/artist_graph.html (23 lines changed)
  6. spotifyvis/templates/spotifyvis/audio_features.html (123 lines changed)
  7. spotifyvis/templates/spotifyvis/logged_in.html (5 lines changed)
  8. spotifyvis/urls.py (10 lines changed)
  9. spotifyvis/views.py (51 lines changed)

reset_db.sh (new file, 14 lines)

@@ -0,0 +1,14 @@
# check if in virtual environment
# https://stackoverflow.com/questions/15454174/how-can-a-shell-function-know-if-it-is-running-within-a-virtualenv/15454916
python -c 'import sys; print(sys.real_prefix)' 2>/dev/null && INVENV=1 || INVENV=0
# echo $INVENV
# if $INVENV is 1, then in virtualenv
if [ $INVENV -eq 1 ]; then
    rm spotifyvis/migrations/00*
    sudo -u postgres psql -f reset_db.sql
    python manage.py makemigrations
    python manage.py migrate
fi

reset_db.sql (new file, 3 lines)

@@ -0,0 +1,3 @@
DROP DATABASE spotifyvis;
CREATE DATABASE spotifyvis;
GRANT ALL PRIVILEGES ON DATABASE spotifyvis TO django;

spotifyvis/static/spotifyvis/scripts/artist_graph.js (new file, 93 lines)

@@ -0,0 +1,93 @@
/**
 * Draws the artist count graph as a bubble chart and appends it to a designated parent element.
 * @param artistData: the artist count data as an array of objects, of the format {'name': artist name, 'num_songs': 50}
 * @param parentElem: the DOM element to append the artist graph to (as a string)
 */
function drawArtistGraph(artistData, parentElem) {
    let margin = {top: 20, right: 30, bottom: 30, left: 40};
    let width = 1000 - margin.right - margin.left;
    let height = 1000 - margin.top - margin.bottom;
    let color = d3.scaleOrdinal(d3.schemeCategory10);
    /*
    ** Next four variables were part of an attempt to make bubbles larger,
    ** didn't work
    */
    let songCounts = artistData.children.map(function(artist) { return artist.num_songs; }); // array of counts
    let songCountExtent = d3.extent(songCounts); // [min song count, max song count]
    let circleSize = {
        min: 45,
        max: 75
    };
    let circleRadiusScale = d3.scaleSqrt().domain(songCountExtent).range([circleSize.min, circleSize.max]);

    let bubble = d3.pack(artistData)
        .size([width + 100, height + 100])
        .padding(0.2);

    let svg = d3.select(parentElem)
        .append("svg")
        .attr("width", width + margin.right + margin.left)
        .attr("height", height + margin.top + margin.bottom)
        .attr("class", "bubble");

    let nodes = d3.hierarchy(artistData)
        .sum(function(d) { return d.num_songs; });

    let node = svg.selectAll(".node")
        .data(bubble(nodes).leaves())
        .enter()
        .filter(function(d) {
            return !d.children;
        })
        .append("g")
        .attr("class", "node")
        .attr("transform", function(d) {
            return "translate(" + d.x + "," + d.y + ")";
        });

    node.append("title")
        .text(function(d) {
            return d.data.name + ": " + d.data.num_songs;
        });

    node.append("circle")
        .attr("r", function(d) {
            return d.r;
        })
        .style("fill", function(d, i) {
            return color(i);
        });

    // artist name text
    node.append("text")
        .attr("dy", ".2em")
        .style("text-anchor", "middle")
        .text(function(d) {
            return d.data.name.substring(0, d.r / 3);
        })
        .attr("font-family", "sans-serif")
        .attr("font-size", function(d) {
            return d.r / 5;
        })
        .attr("fill", "white");

    // artist song count text
    node.append("text")
        .attr("dy", "1.3em")
        .style("text-anchor", "middle")
        .text(function(d) {
            return d.data.num_songs;
        })
        .attr("font-family", "Gill Sans", "Gill Sans MT")
        .attr("font-size", function(d) {
            return d.r / 5;
        })
        .attr("fill", "white");

    d3.select(self.frameElement)
        .style("height", height + "px");
}
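
For context, a minimal sketch of how this function is meant to be fed, assuming the user_artists endpoint returns a flat array of {name, num_songs} objects (the sample values below are invented); artist_graph.html further down in this diff does the same wrapping:

// Hypothetical sample input; the real array comes from the get_artist_data endpoint.
let artistCounts = [
    {name: "Artist A", num_songs: 40},
    {name: "Artist B", num_songs: 12},
    {name: "Artist C", num_songs: 3}
];
// d3.hierarchy() wants a single root object, so the flat array is wrapped in a
// {children: [...]} node before being handed to drawArtistGraph.
drawArtistGraph({children: artistCounts}, "body");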

spotifyvis/static/spotifyvis/scripts/audio_feat_graph.js (new file, 105 lines)

@@ -0,0 +1,105 @@
/**
 * Queries the backend for audio feature data, draws the bar chart
 * illustrating the frequencies of values, and appends the chart to
 * a designated parent element
 *
 * @param audioFeature: the name of the audio feature (string)
 * @param intervalEndPoints: a sorted array of 5 real numbers defining the intervals (categories) of values,
 *                           for example:
 *                           [0, 0.25, 0.5, 0.75, 1.0] for instrumentalness would define ranges
 *                           (0-0.25), (0.25-0.5), (0.5-0.75), (0.75-1.0)
 * @param parentElem: the DOM element to append the graph to (a selector string)
 * @param userSecret: the user secret string for identification
 * @return None
 */
function drawAudioFeatGraph(audioFeature, intervalEndPoints, parentElem, userSecret) {
    // TODO: Not hard code the dimensions?
    let margin = {top: 20, right: 30, bottom: 30, left: 40};
    let width = 480 - margin.left - margin.right,
        height = 270 - margin.top - margin.bottom;

    let featureData = {};
    // Create the keys first in order
    for (let index = 0; index < intervalEndPoints.length - 1; index++) {
        let key = `${intervalEndPoints[index]} ~ ${intervalEndPoints[index + 1]}`;
        featureData[key] = 0;
    }
    // define the vertical scaling function
    let vScale = d3.scaleLinear().range([height, 0]);

    d3.json(`/api/audio_features/${audioFeature}/${userSecret}`)
        .then(function(response) {
            // categorize the data points
            for (let dataPoint of response.data_points) {
                dataPoint = parseFloat(dataPoint);
                // find the index of the largest end point that does not exceed dataPoint
                let index = intervalEndPoints.length - 2;
                while (dataPoint < intervalEndPoints[index]) {
                    index -= 1;
                }
                let key = `${intervalEndPoints[index]} ~ ${intervalEndPoints[index + 1]}`;
                featureData[key] += 1;
            }
            let dataSet = Object.values(featureData);
            let dataRanges = Object.keys(featureData); // Ranges of audio features, e.g. 0-0.25, 0.25-0.5, etc
            let dataArr = [];
            // turn the counts into an array of objects, e.g. {range: "0-0.25", counts: 5}
            for (let i = 0; i < dataRanges.length; i++) {
                dataArr.push({
                    range: dataRanges[i],
                    counts: featureData[dataRanges[i]]
                });
            }
            vScale.domain([0, d3.max(dataSet)]).nice();
            let hScale = d3.scaleBand().domain(dataRanges).rangeRound([0, width]).padding(0.5);
            let xAxis = d3.axisBottom().scale(hScale);
            let yAxis = d3.axisLeft().scale(vScale);

            let featureSVG = d3.select(parentElem)
                .append('svg').attr('width', width + margin.left + margin.right)
                .attr('height', height + margin.top + margin.bottom);
            let featureGraph = featureSVG.append("g")
                .attr("transform", `translate(${margin.left}, ${margin.top})`)
                .attr("fill", "teal");

            featureGraph.selectAll(".bar")
                .data(dataArr)
                .enter().append('rect')
                .attr('class', 'bar')
                .attr('x', function(d) { return hScale(d.range); })
                .attr('y', function(d) { return vScale(d.counts); })
                .attr("height", function(d) { return height - vScale(d.counts); })
                .attr("width", hScale.bandwidth());
            // function(d) { return hScale(d.range); }

            featureGraph.append('g')
                .attr('class', 'axis')
                .attr('transform', `translate(0, ${height})`)
                .call(xAxis);
            featureGraph.append('g')
                .attr('class', 'axis')
                .call(yAxis);

            featureSVG.append("text")
                .attr('x', (width / 2))
                .attr('y', (margin.top / 2))
                .attr('text-anchor', 'middle')
                .style('font-size', '14px')
                .text(`${capFeatureStr(audioFeature)}`);
        });
}

/**
 * Returns the audio feature name string with the first letter capitalized
 * @param audioFeature: the name of the audio feature
 * @returns the audio feature name string with the first letter capitalized
 */
function capFeatureStr(audioFeature) {
    return audioFeature.charAt(0).toUpperCase() + audioFeature.slice(1);
}
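
To make the bucketing step concrete, here is a self-contained sketch of the same categorization logic run on made-up numbers; in the real code the values arrive as strings in response.data_points:

// Hypothetical payload; real data comes from /api/audio_features/<feature>/<user_secret>.
let response = {data_points: ["0.12", "0.30", "0.31", "0.77", "0.98"]};
let intervalEndPoints = [0, 0.25, 0.5, 0.75, 1.0];
let counts = {};
for (let i = 0; i < intervalEndPoints.length - 1; i++) {
    counts[`${intervalEndPoints[i]} ~ ${intervalEndPoints[i + 1]}`] = 0;
}
for (let dataPoint of response.data_points) {
    dataPoint = parseFloat(dataPoint);
    // walk down to the largest end point that does not exceed dataPoint
    let index = intervalEndPoints.length - 2;
    while (dataPoint < intervalEndPoints[index]) {
        index -= 1;
    }
    counts[`${intervalEndPoints[index]} ~ ${intervalEndPoints[index + 1]}`] += 1;
}
// counts is now {"0 ~ 0.25": 1, "0.25 ~ 0.5": 2, "0.5 ~ 0.75": 0, "0.75 ~ 1": 2}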

spotifyvis/templates/spotifyvis/artist_graph.html (new file, 23 lines)

@@ -0,0 +1,23 @@
<!DOCTYPE html>
{% load static %}
<html lang="en">
<head>
    <meta charset="UTF-8">
    <title>Artist Graphs</title>
</head>
<body>
    <p>Logged in as {{ user_id }}</p>
    <script src="https://d3js.org/d3.v5.js"></script>
    <script src="{% static "spotifyvis/scripts/artist_graph.js" %}"></script>
    <script>
        d3.json("{% url "get_artist_data" user_secret %}").then(function(data) {
            // this is the data format needed for bubble charts
            data = {
                children: data
            };
            drawArtistGraph(data, "body");
        });
    </script>
</body>
</html>

spotifyvis/templates/spotifyvis/audio_features.html (123 lines changed)

@@ -22,120 +22,17 @@
     <![endif]-->
     <p>Logged in as {{ user_id }}</p>
     <script src="https://d3js.org/d3.v5.js"></script>
+    <script src="{% static "spotifyvis/scripts/audio_feat_graph.js" %}"></script>
     <script type="text/javascript">
-        /** Queries the backend for audio feature data, draws the bar chart
-         * illustrating the frequencies of values, and appends the chart to
-         * a designated parent element
-         *
-         * @param audioFeature: the name of the audio feature (string)
-         * @param intervalEndPoints: a sorted array of 5 real numbers defining the intervals (categories) of values,
-         *                           for example:
-         *                           [0, 0.25, 0.5, 0.75, 1.0] for instrumentalness would define ranges
-         *                           (0-0.25), (0.25-0.5), (0.5-0.75), (0.75-1.0)
-         * @param parentElem: the DOM element to append the graph to (a selector string)
-         * @return None
-         */
-        function drawAudioFeatGraph(audioFeature, intervalEndPoints, parentElem) {
-            let margin = {top: 20, right: 30, bottom: 30, left: 40};
-            let width = 480 - margin.left - margin.right,
-                height = 270 - margin.top - margin.bottom;
-            let featureData = {};
-            // Create the keys first in order
-            for (let index = 0; index < intervalEndPoints.length - 1; index++) {
-                let key = `${intervalEndPoints[index]} ~ ${intervalEndPoints[index + 1]}`;
-                featureData[key] = 0;
-            }
-            // define the vertical scaling function
-            let vScale = d3.scaleLinear().range([height, 0]);
-            d3.json(`/audio_features/${audioFeature}/{{ user_secret }}`)
-                .then(function(response) {
-                    // categorize the data points
-                    for (let dataPoint of response.data_points) {
-                        dataPoint = parseFloat(dataPoint);
-                        let index = intervalEndPoints.length - 2;
-                        // find the index of the first element greater than dataPoint
-                        while (dataPoint < intervalEndPoints[index]) {
-                            index -= 1;
-                        }
-                        let key = `${intervalEndPoints[index]} ~ ${intervalEndPoints[index + 1]}`;
-                        featureData[key] += 1;
-                    }
-                    let dataSet = Object.values(featureData);
-                    let dataRanges = Object.keys(featureData); // Ranges of audio features, e.g. 0-0.25, 0.25-0.5, etc
-                    let dataArr = [];
-                    // turn the counts into an array of objects, e.g. {range: "0-0.25", counts: 5}
-                    for (let i = 0; i < dataRanges.length; i++) {
-                        dataArr.push({
-                            range: dataRanges[i],
-                            counts: featureData[dataRanges[i]]
-                        });
-                    }
-                    vScale.domain([0, d3.max(dataSet)]).nice();
-                    let hScale = d3.scaleBand().domain(dataRanges).rangeRound([0, width]).padding(0.5);
-                    let xAxis = d3.axisBottom().scale(hScale);
-                    let yAxis = d3.axisLeft().scale(vScale);
-                    let featureSVG = d3.select(parentElem)
-                        .append('svg').attr('width', width + margin.left + margin.right)
-                        .attr('height', height + margin.top + margin.bottom);
-                    let featureGraph = featureSVG.append("g")
-                        .attr("transform", `translate(${margin.left}, ${margin.top})`)
-                        .attr("fill", "teal");
-                    featureGraph.selectAll(".bar")
-                        .data(dataArr)
-                        .enter().append('rect')
-                        .attr('class', 'bar')
-                        .attr('x', function(d) { return hScale(d.range); })
-                        .attr('y', function(d) { return vScale(d.counts); })
-                        .attr("height", function(d) { return height - vScale(d.counts); })
-                        .attr("width", hScale.bandwidth());
-                    // function(d) { return hScale(d.range); }
-                    featureGraph.append('g')
-                        .attr('class', 'axis')
-                        .attr('transform', `translate(0, ${height})`)
-                        .call(xAxis);
-                    featureGraph.append('g')
-                        .attr('class', 'axis')
-                        .call(yAxis);
-                    featureSVG.append("text")
-                        .attr('x', (width / 2))
-                        .attr('y', (margin.top / 2))
-                        .attr('text-anchor', 'middle')
-                        .style('font-size', '14px')
-                        .text(`${capFeatureStr(audioFeature)}`);
-                });
-        }
-        /**
-         * Returns the audio feature name string with the first letter capitalized
-         * @param audioFeature: the name of the audio feature
-         * @returns the audio feature name string with the first letter capitalized
-         */
-        function capFeatureStr(audioFeature) {
-            return audioFeature.charAt(0).toUpperCase() + audioFeature.slice(1);
-        }
-        drawAudioFeatGraph("instrumentalness", [0, 0.25, 0.5, 0.75, 1.0], 'body');
-        drawAudioFeatGraph("valence", [0, 0.25, 0.5, 0.75, 1.0], 'body');
-        drawAudioFeatGraph("energy", [0, 0.25, 0.5, 0.75, 1.0], 'body');
-        drawAudioFeatGraph("tempo", [40, 80, 120, 160, 200], 'body');
-        drawAudioFeatGraph("danceability", [0, 0.25, 0.5, 0.75, 1.0], 'body');
-        drawAudioFeatGraph("acousticness", [0, 0.25, 0.5, 0.75, 1.0], 'body');
-        drawAudioFeatGraph("loudness", [-60, -45, -30, -15, 0], 'body');
-        drawAudioFeatGraph("speechiness", [0, 0.25, 0.5, 0.75, 1.0], 'body');
+        let userSecret = "{{ user_secret }}";
+        drawAudioFeatGraph("instrumentalness", [0, 0.25, 0.5, 0.75, 1.0], 'body', userSecret);
+        drawAudioFeatGraph("valence", [0, 0.25, 0.5, 0.75, 1.0], 'body', userSecret);
+        drawAudioFeatGraph("energy", [0, 0.25, 0.5, 0.75, 1.0], 'body', userSecret);
+        drawAudioFeatGraph("tempo", [0, 40, 80, 120, 160, 200], 'body', userSecret);
+        drawAudioFeatGraph("danceability", [0, 0.25, 0.5, 0.75, 1.0], 'body', userSecret);
+        drawAudioFeatGraph("acousticness", [0, 0.25, 0.5, 0.75, 1.0], 'body', userSecret);
+        drawAudioFeatGraph("loudness", [-60, -45, -30, -15, 0], 'body', userSecret);
+        drawAudioFeatGraph("speechiness", [0, 0.25, 0.5, 0.75, 1.0], 'body', userSecret);
     </script>
 </body>
 </html>
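
In short, the secret that used to be template-interpolated inside the inline script is now read once in the template and passed to the shared static script as an argument; a hypothetical before/after of a single call site:

// Before (inline script): the Django template variable was baked straight into the request URL:
//     d3.json(`/audio_features/${audioFeature}/{{ user_secret }}`)
// After (external audio_feat_graph.js): the template supplies the secret as a parameter:
let userSecret = "{{ user_secret }}";
drawAudioFeatGraph("energy", [0, 0.25, 0.5, 0.75, 1.0], 'body', userSecret);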

spotifyvis/templates/spotifyvis/logged_in.html (5 lines changed)

@@ -9,9 +9,12 @@
     </head>
     <body>
         <h1>{{ user_id }}'s Graphs</h1>
-        <a class="btn btn-primary" href="/audio_features/{{ user_secret }}"
+        <a class="btn btn-primary" href="{% url "display_audio_features" user_secret %}"
            role="button">Audio Features</a>
         <a class="btn btn-primary" href="{% url "display_genre_graph" user_secret %}"
            role="button">Genres</a>
+        <a class="btn btn-primary" href="{% url "display_artist_graph" user_secret %}" role="button">
+            Artists
+        </a>
     </body>
 </html>

spotifyvis/urls.py (10 lines changed)

@@ -1,5 +1,4 @@
 from django.urls import path, include
-from django.conf.urls import url
 from .views import *

@@ -9,11 +8,12 @@ urlpatterns = [
     path('callback', callback, name='callback'),
     path('user_data', user_data, name='user_data'),
     path('admin_graphs', admin_graphs, name='admin_graphs'),
-    path('user_artists/<str:user_id>', get_artist_data, name='get_artist_data'),
+    path('api/user_artists/<str:user_secret>', get_artist_data, name='get_artist_data'),
+    path('graphs/artists/<str:user_secret>', artist_data, name='display_artist_graph'),
     path('api/user_genres/<str:user_secret>', get_genre_data, name='get_genre_data'),
-    path('graphs/genre/<str:client_secret>', display_genre_graph,
+    path('graphs/genre/<str:user_secret>', display_genre_graph,
          name='display_genre_graph'),
-    path('audio_features/<str:client_secret>', audio_features, name='audio_features'),
-    path('audio_features/<str:audio_feature>/<str:client_secret>',
+    path('graphs/audio_features/<str:user_secret>', audio_features, name='display_audio_features'),
+    path('api/audio_features/<str:audio_feature>/<str:user_secret>',
          get_audio_feature_data, name='get_audio_feature_data'),
 ]
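
For orientation, a sketch of the resulting URL layout from the browser's point of view; "abc123" below is only a placeholder for a real user secret:

// Pages that render graph templates:
//   /graphs/artists/abc123          -> display_artist_graph
//   /graphs/genre/abc123            -> display_genre_graph
//   /graphs/audio_features/abc123   -> display_audio_features
// JSON endpoints that the chart scripts request:
d3.json("/api/user_artists/abc123");
d3.json("/api/user_genres/abc123");
d3.json("/api/audio_features/energy/abc123");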

spotifyvis/views.py (51 lines changed)

@@ -183,12 +183,33 @@ def admin_graphs(request):
         update_track_genres(user_obj)
     return render(request, 'spotifyvis/logged_in.html', context)

+def artist_data(request, user_secret):
+    """Renders the artist data graph display page
+
+    :param request: the HTTP request
+    :param user_secret: the user secret used for identification
+    :return: render the artist data graph display page
+    """
+    user = User.objects.get(user_secret=user_secret)
+    context = {
+        'user_id': user.user_id,
+        'user_secret': user_secret,
+    }
+    return render(request, "spotifyvis/artist_graph.html", context)
+
 # get_artist_data {{{ #

 def get_artist_data(request, user_secret):
-    """TODO
+    """Returns artist data as a JSON serialized list of dictionaries
+
+    The (key, value) pairs are (artist name, song count for said artist)
+
+    :param request: the HTTP request
+    :param user_secret: the user secret used for identification
+    :return: a JsonResponse
     """
-    user = User.objects.get(user_id=user_secret)
+    user = User.objects.get(user_secret=user_secret)
     artist_counts = Artist.objects.annotate(num_songs=Count('track',
         filter=Q(track__users=user)))
     processed_artist_counts = [{'name': artist.name,

@@ -197,32 +218,40 @@ def get_artist_data(request, user_secret):
 # }}} get_artist_data #

-def display_genre_graph(request, client_secret):
-    user = User.objects.get(user_secret=client_secret)
+def display_genre_graph(request, user_secret):
+    user = User.objects.get(user_secret=user_secret)
     context = {
-        'user_secret': client_secret,
+        'user_secret': user_secret,
     }
     return render(request, "spotifyvis/genre_graph.html", context)

-def audio_features(request, client_secret):
-    user = User.objects.get(user_secret=client_secret)
+def audio_features(request, user_secret):
+    """Renders the audio features page
+
+    :param request: the HTTP request
+    :param user_secret: user secret used for identification
+    :return: renders the audio features page
+    """
+    user = User.objects.get(user_secret=user_secret)
     context = {
         'user_id': user.user_id,
-        'user_secret': client_secret,
+        'user_secret': user_secret,
     }
     return render(request, "spotifyvis/audio_features.html", context)

 # get_audio_feature_data {{{ #

-def get_audio_feature_data(request, audio_feature, client_secret):
+def get_audio_feature_data(request, audio_feature, user_secret):
     """Returns all data points for a given audio feature

     Args:
         request: the HTTP request
         audio_feature: The audio feature to be queried
-        client_secret: client secret, used to identify the user
+        user_secret: client secret, used to identify the user
     """
-    user = User.objects.get(user_secret=client_secret)
+    user = User.objects.get(user_secret=user_secret)
     user_tracks = Track.objects.filter(users=user)
     response_payload = {
         'data_points': [],
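
As a sanity check on the view changes, roughly what the two JSON views are expected to hand back to the charts, with invented values and field names taken from the code above:

// get_artist_data: a JSON-serialized list of per-artist song counts, e.g.
//   [{"name": "Artist A", "num_songs": 12}, {"name": "Artist B", "num_songs": 5}]
// get_audio_feature_data: the requested feature's values for the user's tracks, e.g.
//   {"data_points": ["0.83", "0.41", "0.07"]}
// which is the shape drawArtistGraph and drawAudioFeatGraph consume:
let userSecret = "abc123";  // placeholder secret
d3.json("/api/user_artists/" + userSecret).then(function(data) {
    drawArtistGraph({children: data}, "body");
});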
