This repository has been archived by the owner on Nov 30, 2021. It is now read-only.

Release 3.0.0 (#24)
dvvanessastoiber authored Jan 16, 2020
2 parents 4b59db5 + 25b9572 commit f24cf97
Showing 21 changed files with 292 additions and 195 deletions.
30 changes: 22 additions & 8 deletions .circleci/config.yml
@@ -3,16 +3,27 @@ jobs:
build:
working_directory: ~/phovea
docker:
- image: caleydo/phovea_circleci_python:v2.0
- image: circleci/python:3.7-buster-node-browsers # for node version see Dockerfile on https://hub.docker.com/r/circleci/python
steps:
- checkout
- run:
name: Show Node.js and npm version
command: |
node -v
npm -v
- run:
name: Show Python and pip version
command: |
python --version
pip --version
- run:
name: Install Docker packages from docker_packages.txt
command: |
(!(test -f docker_packages.txt) || (cat docker_packages.txt | xargs sudo apt-get install -y))
- restore_cache:
key: deps1-{{ .Branch }}-{{ checksum "requirements.txt" }}-{{ checksum "requirements_dev.txt" }}
- run:
name: install-pip-wee
name: Install pip requirements
command: |
virtualenv ~/venv
. ~/venv/bin/activate
@@ -22,25 +33,28 @@ jobs:
key: deps1-{{ .Branch }}-{{ checksum "requirements.txt" }}-{{ checksum "requirements_dev.txt" }}
paths:
- ~/venv
- run: #force update of VCS dependencies?
name: update-pip-vcs-dependencies
- run:
name: Force an update of pip dependencies from git repositories # not sure if this is working ?
command: |
. ~/venv/bin/activate
pip install --upgrade --upgrade-strategy=only-if-needed -r requirements.txt
- run:
name: dist
name: Show installed pip packages
command: pip list || true
- run:
name: Build
command: |
. ~/venv/bin/activate
npm run dist
- store_artifacts:
path: dist
prefix: dist
destination: dist
workflows:
version: 2
# build-nightly:
# triggers:
# - schedule: # nightly build during weekday
# cron: "15 1 * * 1-5"
# - schedule:
#         cron: "15 1 * * 1-5" # "At 01:15 on every day-of-week from Monday through Friday.", see: https://crontab.guru/#15_1_*_*_1-5
# filters:
# branches:
# only:
1 change: 1 addition & 0 deletions .gitignore
@@ -17,3 +17,4 @@ __pycache__/
# due to using tox and pytest
.tox
.cache
package-lock.json
27 changes: 0 additions & 27 deletions .travis.yml

This file was deleted.

8 changes: 7 additions & 1 deletion .yo-rc.json
@@ -27,6 +27,12 @@
"debianPackages": [],
"redhatPackages": []
},
"today": "Tue, 08 Nov 2016 08:36:05 GMT"
"today": "Tue, 08 Nov 2016 08:36:05 GMT",
"promptValues": {
"authorName": "The Caleydo Team",
"authorEmail": "contact@caleydo.org",
"authorUrl": "https://caleydo.org",
"githubAccount": "caleydo"
}
}
}
2 changes: 1 addition & 1 deletion build.py
@@ -19,7 +19,7 @@ def _resolve_plugin(repo, version):
if os.path.isdir('.git') and repo:
if repo.endswith('.git'):
repo = repo[0:-4]
return repo + '/commit/' + _git_head('.')
return repo + '/commit/' + _git_head('.').decode('utf-8')
# not a git repo
return version

69 changes: 69 additions & 0 deletions buildPython.js
@@ -0,0 +1,69 @@
/**
* Created by sam on 13.11.2016.
*/

const spawnSync = require('child_process').spawnSync;
const fs = require('fs');

function gitHead(cwd) {
const r = spawnSync('git', ['rev-parse', '--verify', 'HEAD'], {
cwd: cwd
});
if (!r.stdout) {
console.error(cwd, r.error);
return 'error';
}
return r.stdout.toString().trim();
}

function resolvePlugin(repo, version) {
if (fs.lstatSync('.git').isDirectory() && repo) {
if (repo.endsWith('.git')) {
repo = repo.slice(0, repo.length - 4);
return repo + '/commit/' + gitHead('.');
}
}
// not a git repo
return version;
}

function toVersion(v) {
const now = new Date().toISOString();
// %Y%m%d-%H%M%S
const fmt = now
.replace(/T/, ' ')
.replace(/\..+/, '')
.replace(/[-:]/, '')
.replace(' ', '-');
return v.replace('SNAPSHOT', fmt);
}

function _main() {
const pkg = require('./package.json');
const name = pkg.name;
const version = toVersion(pkg.version);
const resolved = resolvePlugin((pkg.repository || {}).url, version);

const buildInfo = {
name,
version,
resolved,
description: pkg.description,
homepage: pkg.homepage,
repository: (pkg.repository || {}).url
};

const l = ('build/source/' + name.toLowerCase()).split('/');
l.forEach((_, i) => {
const path = l.slice(0, i + 1).join('/');
if (!fs.existsSync(path)) {
fs.mkdirSync(path);
}
});

fs.writeFileSync('build/source/' + name.toLowerCase() + '/buildInfo.json', JSON.stringify(buildInfo, null, ' '));
}

if (require.main === module) {
_main();
}
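
For orientation, here is a minimal sketch of the buildInfo.json that _main() would write to build/source/taco_server/ given the package.json in this commit, assuming the script runs inside a git checkout so resolvePlugin() takes the ".git" branch; the commit hash is only a placeholder, since gitHead() returns whatever HEAD is at build time:

{
 "name": "taco_server",
 "version": "3.0.0",
 "resolved": "https://github.com/Caleydo/taco_server/commit/<head-sha>",
 "description": "The server part for comparing large tabular data using Phovea",
 "homepage": "http://caleydo.org",
 "repository": "https://github.com/Caleydo/taco_server.git"
}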
81 changes: 42 additions & 39 deletions data/olympics_generator/count_by_year.py
@@ -1,40 +1,41 @@
import csv
import json

createdCSVs = []
created_cvs_list = []

def writeIndexJson():

def write_index_json():
with open('../index.json', 'w') as outfile:
json.dump(createdCSVs, outfile)
json.dump(created_cvs_list, outfile)


def writeCSV(year, medalType, fieldnames, medalsPerCountry):
def write_csv(year, medal_type, fieldnames, medals_per_country):
if year is None:
print('Invalid year -> file not written')
return

name = 'Olympic Games ' + year + ' (' + medalType + ' Medals)'
filename = 'olympics_' + year + '_' + medalType.lower() + '.csv'
name = 'Olympic Games ' + year + ' (' + medal_type + ' Medals)'
filename = 'olympics_' + year + '_' + medal_type.lower() + '.csv'

# sort countries by sum of all medals
sortedBySum = sorted(medalsPerCountry.items(), key=lambda x: sum(x[1].values()), reverse=True)
sorted_by_sum = sorted(medals_per_country.items(), key=lambda x: sum(x[1].values()), reverse=True)

print('----------------')
print('Write ' + filename)
print(fieldnames)
print(sortedBySum)
print(sorted_by_sum)

# get min and max value of the whole csv for the range
maxValue = float('-inf')
#minValue = float('inf') # does not work, because we fill empty cells with 0 by default
max_value = float('-inf')
# min_value = float('inf') # does not work, because we fill empty cells with 0 by default

with open('../' + filename, 'wb') as output:
writer = csv.DictWriter(output, fieldnames=fieldnames, restval='0', dialect='excel')
writer.writeheader()
for k, v in sortedBySum:
for k, v in sorted_by_sum:
values = list(v.values())
maxValue = max(maxValue, max(values))
#minValue = min(minValue, min(values))
max_value = max(max_value, max(values))
# min_value = min(min_value, min(values))
v['CountryCode'] = k
writer.writerow(v)

@@ -43,57 +44,59 @@ def writeCSV(year, medalType, fieldnames, medalsPerCountry):
stats['name'] = name
stats['path'] = filename
stats['type'] = 'matrix'
stats['size'] = [len(sortedBySum), len(fieldnames)-1] # -1 = CountryCode fieldname
stats['size'] = [len(sorted_by_sum), len(fieldnames)-1] # -1 = CountryCode fieldname
stats['rowtype'] = 'Country'
stats['coltype'] = 'Discipline'
stats['value'] = dict(type='real', range=[0, maxValue])
stats['value'] = dict(type='real', range=[0, max_value])

createdCSVs.append(stats)
created_cvs_list.append(stats)

print('----------------')

def readCSV(medalType = 'Total'):

def read_csv(medal_type='Total'):
with open('./MedalData1.csv', 'rb') as csvfile:
reader = csv.DictReader(csvfile, fieldnames=['Games','Sport','Event','Athlete(s)','CountryCode','CountryName','Medal','ResultInSeconds'], dialect='excel-tab')
reader = csv.DictReader(csvfile, fieldnames=['Games', 'Sport', 'Event', 'Athlete(s)', 'CountryCode', 'CountryName', 'Medal', 'ResultInSeconds'], dialect='excel-tab')
next(reader)

lastGames = None
last_games = None
fieldnames = ['CountryCode']
medalsPerCountry = dict()
medals_per_country = dict()

for row in reader:
if row['Games'] != lastGames:
if row['Games'] != last_games:
# write old year when a new year is detected
writeCSV(lastGames, medalType, fieldnames, medalsPerCountry)
write_csv(last_games, medal_type, fieldnames, medals_per_country)

# clean up variables
fieldnames = ['CountryCode']
medalsPerCountry = dict()
medals_per_country = dict()

lastGames = row['Games']
country = row['CountryCode'] # short-cut
last_games = row['Games']
country = row['CountryCode'] # short-cut

if row['Event'] not in fieldnames:
fieldnames.append(row['Event'])

if row['Medal'] == medalType or medalType is 'Total':
if country not in medalsPerCountry:
medalsPerCountry[country] = dict()
#medalsPerCountry[country]['CountryCode'] = country
if row['Medal'] == medal_type or medal_type == 'Total':
if country not in medals_per_country:
medals_per_country[country] = dict()
# medals_per_country[country]['CountryCode'] = country

if row['Event'] not in medalsPerCountry[country]:
medalsPerCountry[country][row['Event']] = 0
if row['Event'] not in medals_per_country[country]:
medals_per_country[country][row['Event']] = 0

medalsPerCountry[country][row['Event']] += 1
medals_per_country[country][row['Event']] += 1

#print(row['Games'], row['Event'], country, row['Medal'])
# print(row['Games'], row['Event'], country, row['Medal'])

# write the last file
writeCSV(lastGames, medalType, fieldnames, medalsPerCountry)
write_csv(last_games, medal_type, fieldnames, medals_per_country)


readCSV('Total')
readCSV('Bronze')
readCSV('Silver')
readCSV('Gold')
read_csv('Total')
read_csv('Bronze')
read_csv('Silver')
read_csv('Gold')

writeIndexJson()
write_index_json()
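
From the assignments visible in write_csv() above, each entry appended to ../index.json describes one generated matrix CSV. A hypothetical entry, for illustration only (the year, size, and range values are invented, and the collapsed part of the hunk may set further keys), might look like:

{
 "name": "Olympic Games 2012 (Gold Medals)",
 "path": "olympics_2012_gold.csv",
 "type": "matrix",
 "size": [86, 35],
 "rowtype": "Country",
 "coltype": "Discipline",
 "value": {"type": "real", "range": [0, 46]}
}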
1 change: 1 addition & 0 deletions deploy/docker-compose.partial.yml
@@ -0,0 +1 @@
version: '2.0'
40 changes: 20 additions & 20 deletions package.json
@@ -1,4 +1,21 @@
{
"name": "taco_server",
"description": "The server part for comparing large tabular data using Phovea",
"version": "3.0.0",
"author": {
"name": "The Caleydo Team",
"email": "contact@caleydo.org",
"url": "https://caleydo.org"
},
"license": "BSD-3-Clause",
"homepage": "http://caleydo.org",
"bugs": {
"url": "https://github.com/caleydo/taco_server/issues"
},
"repository": {
"type": "git",
"url": "https://github.com/Caleydo/taco_server.git"
},
"files": [
"taco_server",
"__init__.py",
@@ -9,30 +26,13 @@
"docker_packages.txt"
],
"scripts": {
"check": "flake8",
"check": "flake8 --exclude=.git,venv,deploy,docs,__pycache__,node_modules",
"pretest": "npm run check",
"test": "test ! -d tests || python setup.py test",
"prebuild": "node -e \"process.exit(process.env.PHOVEA_SKIP_TESTS === undefined?1:0)\" || npm run test",
"build": "python build.py",
"build": "rm -rf build/source && find . -name '*.pyc' -delete && node buildPython.js && cp -r ./taco_server build/source/",
"predist": "npm run build && npm run docs",
"dist": "python setup.py bdist_egg && cd build && tar cvzf ../dist/taco_server.tar.gz *",
"dist": "python setup.py sdist bdist_wheel",
"docs": "sphinx-apidoc -o docs -f ./taco_server && sphinx-build ./docs build/docs"
},
"name": "taco_server",
"description": "The server part for comparing large tabular data using Phovea",
"homepage": "http://caleydo.org",
"version": "1.0.0-SNAPSHOT",
"author": {
"name": "The Caleydo Team",
"email": "contact@caleydo.org",
"url": "https://caleydo.org"
},
"license": "BSD-3-Clause",
"bugs": {
"url": "https://github.com/caleydo/taco_server/issues"
},
"repository": {
"type": "git",
"url": "https://github.com/caleydo/taco_server.git"
}
}
3 changes: 1 addition & 2 deletions requirements.txt
@@ -1,3 +1,2 @@
-e git+https://github.com/phovea/phovea_server.git@develop#egg=phovea_server
enum==0.4.6
phovea_server>=4.0.0,<5.0.0
sklearn==0.0