diff --git a/app/__init__.py b/app/__init__.py
index 2fd3d82fefb5c8b25d7d0d93cae852662cfbc5c4..71c6ee7e0970ac6a6dea8fa8cde8bf4781bd880e 100644
--- a/app/__init__.py
+++ b/app/__init__.py
@@ -15,7 +15,7 @@ from flask_cors import CORS, cross_origin
 
 app = Flask(__name__)
 CORS(app)
-#CORS(app, resources={r"/*": {"cors_allowed_origins":"*"} } )
+# CORS(app, resources={r"/*": {"cors_allowed_origins":"*"} } )
 #app.config['BABEL_DEFAULT_LOCALE'] = 'fin'
 #app.config['BABEL_TRANSLATION_DIRECTORIES'] ='C:/Users/Timo/git/pet-rating/app/translations'
@@ -68,9 +68,6 @@ def get_locale():
 """
 
 
-# Run flask app with socketIO
-socketio = SocketIO()
-socketio.init_app(app)
 
 
 #mariabd mysql portti 3306 tarkista?
@@ -81,6 +78,11 @@ migrate = Migrate(app, db)
 login = LoginManager(app)
 login.login_view = 'login'
+# Run flask app with socketIO
+socketio = SocketIO(app, cors_allowed_origins="*")
+# socketio = SocketIO()
+socketio.init_app(app)
+
 
 # Register blueprints
 from .task.views import task_blueprint
 from .experiment.views import experiment_blueprint
diff --git a/app/experiment/templates/experiment_statistics.html b/app/experiment/templates/experiment_statistics.html
index 817470eafe2cfc7e485c63e9c1c60db30827c24d..20543144aff734ffef78da95aaedbff1a70df4ad 100644
--- a/app/experiment/templates/experiment_statistics.html
+++ b/app/experiment/templates/experiment_statistics.html
@@ -36,7 +36,25 @@
     <tr>
       <td>Number of finished ratings:</td>
       <td>{{ finished_ratings }}
-        <a class="btn btn-primary btn-info float-right" href="{{ url_for('download_csv', exp_id=exp.idexperiment) }}" role="button">Export results (csv)</a>
+
+
+        <button data-value="{{ exp.idexperiment }}" class="btn btn-primary float-right get-csv-results">
+
+          export results
+
+        </button>
+
+        <div id="export-link-container" class="hidden">
+          <a id="export-link" class="float-right" href="{{ url_for('experiment.download_csv', exp_id=exp.idexperiment) }}" role="button"></a>
+          <p id="export-error"></p>
+        </div>
+
+
+        <div class="progress hidden">
+          <div id="export-results-bar" class="progress-bar progress-bar-striped progress-bar-animated" role="progressbar" aria-valuenow="50" aria-valuemin="0" aria-valuemax="100" style="width: 0%">
+          </div>
+        </div>
+
       </td>
     </tr>
   </tbody>
@@ -194,6 +212,8 @@
 </table>
 
 <script src="{{ url_for('static', filename='lib/js/socket.io.js') }}" ></script>
+<script src="{{ url_for('static', filename='js/urls.js') }}" ></script>
 <script src="{{ url_for('static', filename='js/getDrawing.js') }}" ></script>
+<script src="{{ url_for('static', filename='js/getCSV.js') }}" ></script>
 
 {% endblock %}
diff --git a/app/experiment/views.py b/app/experiment/views.py
index 137d5c4f544310881b3dcb201b7c48e205e509c7..e969a1d937096d685d799f8b26232373e812faf7 100644
--- a/app/experiment/views.py
+++ b/app/experiment/views.py
@@ -1,38 +1,31 @@
-
-from flask_cors import CORS, cross_origin
-from app import socketio
-from flask_socketio import emit
-import embody_plot
 import os
 import secrets
-import json
+from datetime import date
+from tempfile import mkstemp
+from flask_socketio import emit
+from sqlalchemy import and_
+from flask_login import login_required
+from werkzeug import secure_filename
 from flask import (
-    Flask, render_template, request,
-    session, flash, redirect, url_for, Blueprint,
-    jsonify
+    send_file
 )
-from wtforms import Form
-from sqlalchemy import and_, update
-from flask_login import login_required
-from werkzeug import secure_filename
-
-from app import app, db
+from app import app, db, socketio
 from app.routes import APP_ROOT
 from app.models import background_question, experiment
 from app.models import background_question_answer
 from app.models import page, question
 from app.models import background_question_option
 from app.models import answer_set, answer, forced_id
-from app.models import user, trial_randomization
+from app.models import trial_randomization
 from app.models import embody_answer, embody_question
 from app.forms import (
     CreateBackgroundQuestionForm,
@@ -40,8 +33,10 @@ from app.forms import (
     EditQuestionForm, EditExperimentForm, UploadResearchBulletinForm,
     EditPageForm, RemoveExperimentForm, GenerateIdForm, CreateEmbodyForm
 )
-from app.utils import get_mean_from_slider_answers, map_answers_to_questions
+from app.utils import get_mean_from_slider_answers, map_answers_to_questions, \
+    generate_csv
+import embody_plot
 
 # Stimuli upload folder setting
 #APP_ROOT = os.path.dirname(os.path.abspath(__file__))
@@ -957,6 +952,8 @@ def statistics():
     questions = question.query.filter_by(experiment_idexperiment=exp_id).all()
     pages_and_questions = {}
 
+    '''
+
     for p in pages:
         questions_list = [(p.idpage, a.idquestion) for a in questions]
         pages_and_questions[p.idpage] = questions_list
@@ -965,27 +962,34 @@
     # those are in answer table as page_idpage and question_idquestion respectively
     slider_answers = {}
     for participant in participants:
-        if participant.answer_counter > 0:
-            answers = answer.query.filter_by(
-                answer_set_idanswer_set=participant.idanswer_set)\
-                .order_by(answer.page_idpage)\
-                .all()
-            # flatten pages and questions to list of tuples (page_id, question_id)
-            _questions = [
-                item for sublist in pages_and_questions.values() for item in sublist]
+        if int(participant.answer_counter) == 0:
+            continue
+
+        answers = answer.query.filter_by(
+            answer_set_idanswer_set=participant.idanswer_set)\
+            .order_by(answer.page_idpage)\
+            .all()
 
-            slider_answers[participant.session] = map_answers_to_questions(
-                answers, _questions)
+        # flatten pages and questions to list of tuples (page_id, question_id)
+        _questions = [
+            item for sublist in pages_and_questions.values() for item in sublist]
+        slider_answers[participant.session] = map_answers_to_questions(
+            answers, _questions)
+
     mean = get_mean_from_slider_answers(slider_answers)
 
     # slider_answers['mean'] = get_mean_from_slider_answers(slider_answers)
     slider_answers = {
-        'mean': mean
+        'mean': mean
     }
+    '''
+
+    slider_answers = {}
+
     # Background question answers
     bg_questions = background_question.query.filter_by(
         experiment_idexperiment=exp_id).all()
@@ -1012,8 +1016,27 @@
         finished_ratings=finished_ratings,
         question_headers=question_headers,
         stimulus_headers=stimulus_headers,
-        embody_questions=embody_questions
-    )
+        embody_questions=embody_questions)
+
+
+@experiment_blueprint.route('/download_csv')
+def download_csv():
+    exp_id = request.args.get('exp_id', None)
+    path = request.args.get('path', None)
+
+    filename = "experiment_{}_{}.csv".format(
+        exp_id, date.today().strftime("%Y-%m-%d"))
+
+    path = '/tmp/' + path
+
+    try:
+        return send_file(path,
+                         mimetype='text/csv',
+                         as_attachment=True,
+                         attachment_filename=filename)
+
+    finally:
+        os.remove(path)
 
 
 def remove_rows(rows):
@@ -1024,27 +1047,55 @@
 
 
 @socketio.on('connect', namespace="/create_embody")
-def create_embody():
+def start_create_embody():
     emit('success', {'connection': 'on'})
 
 
 @socketio.on('draw', namespace="/create_embody")
-def create_embody(page_id):
-
-    print("DRAW")
-
-    page = page_id["page"]
-    embody = page_id["embody"]
-
+def create_embody(meta):
+    page = meta["page"]
+    embody = meta["embody"]
     img_path = embody_plot.get_coordinates(page, embody)
     app.logger.info(img_path)
     emit('end', {'path': img_path})
 
 
-@socketio.on('end', namespace="/create_embody")
-def create_embody():
-    print("connection end")
-    emit('end', {'connection': 'off'})
+@socketio.on('connect', namespace="/download_csv")
+def start_download_csv():
+    emit('success', {'connection': 'Start generating CSV file'})
 
-# EOF
+@socketio.on('generate_csv', namespace="/download_csv")
+def download_csv(meta):
+    exp_id = meta["exp_id"]
+
+    data = generate_csv(exp_id)
+
+    # error handling
+    if isinstance(data, Exception):
+        emit('timeout', {'exc': str(data)})
+        return
+
+    # create temporary file
+    fd, path = mkstemp()
+    with os.fdopen(fd, 'w') as tmp:
+        tmp.write(data)
+        tmp.flush()
+
+    # return path and filename to front so user can start downloading
+    filename = "experiment_{}_{}".format(
+        exp_id, date.today().strftime("%Y-%m-%d"))
+    path = path.split('/')[-1]
+    emit('file_ready', {'path': path, 'filename': filename})
+
+
+@socketio.on('end', namespace="/download_csv")
+def end_download_csv():
+    # TODO: not working solution... db session keeps hanging after socket session has ended
+    # mysqld timeout is set to 180s, so it kills hanging connections, but this is not a good solution
+    db.session.close()
+
+
+@socketio.on('end', namespace="/create_embody")
+def end_create_embody():
+    db.session.close()
diff --git a/app/models.py b/app/models.py
index ed4669cda1336d2d73e7b845897a8b5e7488489d..c792add29192ab35999a8898e477eff576fd505b 100644
--- a/app/models.py
+++ b/app/models.py
@@ -153,6 +153,12 @@ class answer (db.Model):
     answer = db.Column(db.String(120))
     page_idpage = db.Column(db.Integer, db.ForeignKey('page.idpage'))
 
+    def question(self):
+        return int(self.question_idquestion)
+
+    def result(self):
+        return int(self.answer)
+
     def __repr__(self):
         return "<idanswer = '%s', question_idquestion = '%s', answer_set_idanswer_set = '%s', answer = '%s', page_idpage = '%s'>" % (self.idanswer, self.question_idquestion, self.answer_set_idanswer_set, self.answer, self.page_idpage)
 
@@ -167,6 +173,12 @@ class embody_answer (db.Model):
         db.Integer, db.ForeignKey('embody_question.idembody'))
     coordinates = db.Column(db.Text)
 
+    def question(self):
+        return self.embody_question_idembody
+
+    def result(self):
+        return self.coordinates
+
     def __repr__(self):
         return "<idanswer = '%s', answer_set_idanswer_set = '%s', coordinates = '%s', page_idpage = '%s', embody_question_idembody='%s' >" % (self.idanswer, self.answer_set_idanswer_set, self.coordinates, self.page_idpage, self.embody_question_idembody)
diff --git a/app/routes.py b/app/routes.py
index 13550adb00e16e92932debd4928639ccff2627d6..9d64ba14306b391f4546579683f54c383f1e1b8f 100644
--- a/app/routes.py
+++ b/app/routes.py
@@ -1,8 +1,7 @@
 import os
 import random
 import secrets
-from datetime import datetime, date
-import json
+from datetime import datetime
 
 from flask import (render_template,
                    request,
@@ -16,12 +15,11 @@ from flask_login import current_user, login_user, logout_user, login_required
 from app import app, db
 from app.models import background_question, experiment
 from app.models import background_question_answer
-from app.models import page, question, embody_question, embody_answer
+from app.models import page
 from app.models import background_question_option
-from app.models import answer_set, answer, forced_id
+from app.models import answer_set, forced_id
 from app.models import user, trial_randomization
 from app.forms import LoginForm, RegisterForm, StartWithIdForm
-from app.utils import saved_data_as_file, map_answers_to_questions
 
 # Stimuli upload folder setting
 APP_ROOT = os.path.dirname(os.path.abspath(__file__))
@@ -336,164 +334,7 @@ def view_research_notification():
     return render_template('view_research_notification.html', research_notification_filename=research_notification_filename)
 
 
-@app.route('/download_csv')
-@login_required
-def download_csv():
-
-    exp_id = request.args.get('exp_id', None)
-    experiment_info = experiment.query.filter_by(idexperiment=exp_id).all()
-
-    print(experiment_info)
-
-    # answer sets with participant ids
-    participants = answer_set.query.filter_by(
-        experiment_idexperiment=exp_id).all()
-
-    # pages aka stimulants
-    pages = page.query.filter_by(experiment_idexperiment=exp_id).all()
-
-    # background questions
-    bg_questions = background_question.query.filter_by(
-        experiment_idexperiment=exp_id).all()
-
-    # question
-    questions = question.query.filter_by(experiment_idexperiment=exp_id).all()
-
-    # embody questions
-    embody_questions = embody_question.query.filter_by(
-        experiment_idexperiment=exp_id).all()
-
-    csv = ''
-
-    # create CSV-header
-    header = 'participant id;'
-    header += ';'.join([str(count) + '. bg_question: ' + question.background_question.strip()
-                        for count, question in enumerate(bg_questions, 1)])
-
-    for idx in range(1, len(pages) + 1):
-        if len(questions) > 0:
-            header += ';' + ';'.join(['page' + str(idx) + '_' + str(count) + '. slider_question: ' +
-                                      question.question.strip() for count, question in enumerate(questions, 1)])
-
-    for idx in range(1, len(pages) + 1):
-        if len(embody_questions) > 0:
-            header += ';' + ';'.join(['page' + str(idx) + '_' + str(count) + '. embody_question: ' +
-                                      question.picture.strip() for count, question in enumerate(embody_questions, 1)])
-
-    csv += header + '\r\n'
-    answer_row = ''
-
-    for participant in participants:
-        # list only finished answer sets
-        if participant.answer_counter > 0:
-            try:
-                # append user session id
-                answer_row += participant.session + ';'
-
-                # append background question answers
-                bg_answers = background_question_answer.query.filter_by(
-                    answer_set_idanswer_set=participant.idanswer_set).all()
-                bg_answers_list = [str(a.answer).strip() for a in bg_answers]
-                answer_row += ';'.join(bg_answers_list) + ';'
-
-                # append slider answers
-                slider_answers = answer.query.filter_by(
-                    answer_set_idanswer_set=participant.idanswer_set) \
-                    .order_by(answer.page_idpage) \
-                    .all()
-
-                pages_and_questions = {}
-                for p in pages:
-                    questions_list = [(p.idpage, a.idquestion) for a in questions]
-                    pages_and_questions[p.idpage] = questions_list
-
-                _questions = [
-                    item for sublist in pages_and_questions.values() for item in sublist]
-
-                answers_list = map_answers_to_questions(slider_answers, _questions)
-
-                # typecast elemnts to string
-                answers_list = [str(a).strip() for a in answers_list]
-
-                answer_row += ';'.join(answers_list) + \
-                    ';' if slider_answers else len(
-                        questions) * len(pages) * ';'
-
-                # append embody answers (coordinates)
-                # save embody answers as bitmap images
-                embody_answers = embody_answer.query.filter_by(
-                    answer_set_idanswer_set=participant.idanswer_set) \
-                    .order_by(embody_answer.page_idpage) \
-                    .all()
-
-                pages_and_questions = {}
-                for p in pages:
-                    questions_list = [(p.idpage, a.idembody) for a in embody_questions]
-                    pages_and_questions[p.idpage] = questions_list
-
-                _questions = [
-                    item for sublist in pages_and_questions.values() for item in sublist]
-
-                _embody_answers = map_answers_to_questions(embody_answers, _questions)
-
-                answers_list = []
-
-                for answer_data in _embody_answers:
-
-                    if not answer_data:
-                        answers_list.append('')
-                        continue
-
-                    try:
-                        coordinates = json.loads(answer_data.coordinates)
-                        em_height = coordinates.get('height', 600) + 2
-                        em_width = coordinates.get('width', 200) + 2
-
-                        coordinates_to_bitmap = [
-                            [0 for x in range(em_height)] for y in range(em_width)]
-
-                        coordinates = list(
-                            zip(coordinates.get('x'), coordinates.get('y')))
-
-                        for point in coordinates:
-
-                            try:
-                                # for every brush stroke, increment the pixel
-                                # value for every brush stroke
-                                coordinates_to_bitmap[point[0]][point[1]] += 0.1
-                            except IndexError:
-                                continue
-
-                        answers_list.append(json.dumps(coordinates_to_bitmap))
-
-                    except ValueError as err:
-                        app.logger(err)
-
-                answer_row += ';'.join(answers_list) if embody_answers else \
-                    len(embody_questions) * len(pages) * ';'
-
-                # old way to save only visited points:
-                # answers_list = [json.dumps(
-                #     list(zip( json.loads(a.coordinates)['x'],
-                #     json.loads(a.coordinates)['y']))) for a in embody_answers]
-
-            except TypeError as err:
-                print(err)
-
-        csv += answer_row + '\r\n'
-        answer_row = ''
-
-    filename = "experiment_{}_{}.csv".format(
-        exp_id, date.today().strftime("%Y-%m-%d"))
-
-    return saved_data_as_file(filename, csv)
-
-
 @app.route('/researcher_info')
 @login_required
 def researcher_info():
     return render_template('researcher_info.html')
-
-
-# EOF
diff --git a/app/static/css/main.css b/app/static/css/main.css
index eeb85710389fc8101bdd970f30d073288b47bf9d..f0c53e71c9961114b1d8988e1e6df057df6a1454 100644
--- a/app/static/css/main.css
+++ b/app/static/css/main.css
@@ -75,4 +75,15 @@ body {
     max-width: 90%;
   }
-}
\ No newline at end of file
+}
+
+
+#export-link-container {
+  margin-top: 20px;
+  padding: 10px;
+}
+
+#export-error {
+  float:right;
+  color:red;
+}
diff --git a/app/static/js/getCSV.js b/app/static/js/getCSV.js
new file mode 100644
index 0000000000000000000000000000000000000000..4b0313bbc2f96e59041548022644136f886ac246
--- /dev/null
+++ b/app/static/js/getCSV.js
@@ -0,0 +1,77 @@
+
+
+
+$(document).ready(function() {
+
+    var exportButton = $(".get-csv-results");
+
+    var progressBarContainer = $(".progress")
+    var progressBar = $("#export-results-bar")
+
+    var exportLinkContainer = $("#export-link-container");
+    var exportLink = $("#export-link");
+    var exportError = $("#export-error");
+
+    // With sockets
+    function initConnection(socket) {
+
+        socket.on('success', function(msg) {
+            exportButton.text('Generating file...')
+            exportButton.addClass('disabled')
+        });
+
+        socket.on('progress', function(data) {
+            progressBar.width(100*(data.done/data.from) + '%')
+        });
+
+        socket.on('timeout', function(data) {
+            // kill connection
+
+            socket.emit('end')
+            socket.disconnect()
+
+            exportButton.text('Export results')
+            exportButton.removeClass('disabled')
+            progressBarContainer.addClass("hidden")
+
+            // show error
+            exportLinkContainer.removeClass("hidden")
+            exportError.text('Error: ' + data.exc)
+        });
+
+        socket.on('file_ready', function(file) {
+
+            socket.emit('end')
+            socket.disconnect()
+
+            exportButton.text('File is ready!')
+
+            // show link
+            exportLinkContainer.removeClass("hidden")
+            exportLink.text('Download: ' + file.filename + '.csv')
+
+            // set filename to exportlink
+            var href = exportLink.attr('href');
+            href += '&path=' + file.path
+            $(exportLink).attr('href', href);
+
+            // Remove progress bar
+            progressBarContainer.addClass("hidden")
+            progressBar.width('0%')
+        });
+    }
+
+
+    exportButton.click(function(event) {
+        event.preventDefault()
+
+        // Init socket
+        var socket = io.connect(exportURL);
+        initConnection(socket)
+
+        // start generating csv file...
+        socket.emit('generate_csv', {exp_id: this.dataset.value})
+
+        progressBarContainer.removeClass("hidden")
+    })
+})
diff --git a/app/static/js/getDrawing.js b/app/static/js/getDrawing.js
index 4504eb302e9aa17a0a4e9f5128036f3b2bd3ec82..70db2ae5142c7cda61360b362877a3b2d410a20d 100644
--- a/app/static/js/getDrawing.js
+++ b/app/static/js/getDrawing.js
@@ -1,9 +1,5 @@
-const baseURI = 'localhost/';
-//const baseURI = 'http://onni.utu.fi/';
-var getDrawingURI = baseURI + 'create_embody';
-
 $(document).ready(function() {
 
     var drawButtons = $(".embody-get-drawing");
 
@@ -23,7 +19,8 @@ $(document).ready(function() {
         });
 
         socket.on('end', function(img) {
-
+            // kill connection
+            socket.emit('end')
             socket.disconnect()
 
             // Draw image to statistic -page
@@ -45,37 +42,13 @@ $(document).ready(function() {
         var socket = io.connect(getDrawingURI);
         initConnection(socket)
 
-        //
         var pageId = this.dataset.value.split('-')[0]
         var embodyId = this.dataset.value.split('-')[1]
-        console.log(pageId)
-        console.log(embodyId)
-
-
        socket.emit('draw', {page:pageId, embody:embodyId})
 
         progressBarContainer.removeClass("hidden")
         scrollTo('plotted-image')
-
-        /*
-        With AJAX -calls
-        var spinner = $(event.target.firstElementChild)
-        spinner.removeClass("hidden")
-
-        $.ajax({
-            url: getDrawingURI,
-            method: 'POST',
-            data: {page:pageId}
-        }).done(function(data) {
-            var source = JSON.parse(data).path;
-            console.log(source)
-            d = new Date()
-            imageContainer.attr("src", "/static/" + source + "?" +d.getTime())
-            spinner.addClass("hidden")
-        })
-        */
-
     })
 
     function scrollTo(hash) {
@@ -83,5 +56,4 @@ $(document).ready(function() {
             'scrollTop': $('#'+hash).offset().top - 250
         }, 500);
     }
-
 })
diff --git a/app/static/js/urls.js b/app/static/js/urls.js
new file mode 100644
index 0000000000000000000000000000000000000000..d2349e01d48a4087afe894ff7d0d22e1c849c4de
--- /dev/null
+++ b/app/static/js/urls.js
@@ -0,0 +1,6 @@
+
+const baseURI = 'localhost/';
+//const baseURI = 'http://onni.utu.fi/';
+
+var exportURL = baseURI + 'download_csv';
+var getDrawingURI = baseURI + 'create_embody';
\ No newline at end of file
diff --git a/app/utils.py b/app/utils.py
index 2087e85972194902b94572114b58e4446986eb69..1da87b5d2c94e64a6f233e1daea9cf846c3a8f48 100644
--- a/app/utils.py
+++ b/app/utils.py
@@ -1,7 +1,29 @@
 import os
 import tempfile
+import time
+import json
 from itertools import zip_longest
+import concurrent.futures
 from flask import send_file
+from flask_socketio import emit
+from app import app
+from app.models import background_question, background_question_answer, \
+    page, question, answer_set, answer, embody_answer, embody_question
+
+
+def timeit(method):
+    def timed(*args, **kw):
+        ts = time.time()
+        result = method(*args, **kw)
+        te = time.time()
+        if 'log_time' in kw:
+            name = kw.get('log_name', method.__name__.upper())
+            kw['log_time'][name] = int((te - ts) * 1000)
+        else:
+            app.logger.info('{} {:2.2f} ms'.format(method.__name__, (te - ts) * 1000))
+        return result
+
+    return timed
 
 
 def map_values_to_int(values: dict):
@@ -10,7 +32,6 @@
 def calculate_mean(values: list) -> float:
-    print(values)
     n_answers = sum(x is not None for x in values)
     sum_of_answers = float(sum(filter(None, values)))
     mean = sum_of_answers / n_answers
@@ -52,6 +73,12 @@
     return None
 
 
+def question_matches_answer(question, answer):
+    if (answer.page_idpage == question[0] and answer.question() == question[1]):
+        return True
+    return False
+
+
 def map_answers_to_questions(answers, questions):
     '''
     questions = [(4, 1), (4, 2), (5, 1), (5, 2), (6, 1), (6, 2)]
@@ -59,7 +86,221 @@
     answers = [{p:6, q:1, a:100}, {p:6, q:2, a:99}]
 
     -> partial_answer = [None, None, None, None, 100, 99]
+
     '''
+
+    # results = []
+    results = list(map(lambda x: None, questions))
+
+    nth_answer = 0
+
+    for nth_question, question in enumerate(questions):
+
+        try:
+            current_answer = answers[nth_answer]
+        except IndexError:
+            break
+
+        if question_matches_answer(question, current_answer):
+            results[nth_question] = current_answer.result()
+            nth_answer += 1
+
+    return results
+    '''
     return list(map(
         lambda x: get_values_from_list_of_answers(x, answers),
         questions))
+    '''
+
+
+'''
+select sub.answer_set_idanswer_set, group_concat(concat(
+    COALESCE(sub.aa, ''),
+    COALESCE(sub.ab, ''),
+    COALESCE(sub.ba, ''),
+    COALESCE(sub.bb, ''),
+    COALESCE(sub.ca, ''),
+    COALESCE(sub.cb, '')
+))
+FROM (
+    select *,
+    case when page_idpage = 4 and question_idquestion = 1 then answer end as aa,
+    case when page_idpage = 4 and question_idquestion = 2 then answer end as ab,
+    case when page_idpage = 5 and question_idquestion = 1 then answer end as ba,
+    case when page_idpage = 5 and question_idquestion = 2 then answer end as bb,
+    case when page_idpage = 6 and question_idquestion = 1 then answer end as ca,
+    case when page_idpage = 6 and question_idquestion = 2 then answer end as cb
+    from answer where answer_set_idanswer_set in ( select idanswer_set from answer_set where experiment_idexperiment = 2 and answer_counter != 0 )
+) as sub
+group by sub.answer_set_idanswer_set;
+
+
+
+
+# all possible page/question combos
+select distinct p.idpage, q.idquestion from question q join page p on p.experiment_idexperiment=q.experiment_idexperiment where p.experiment_idexperiment = 2 order by p.idpage,q.idquestion;
+'''
+
+
+@timeit
+def generate_csv(exp_id):
+
+    # answer sets with participant ids
+    participants = answer_set.query.filter_by(
+        experiment_idexperiment=exp_id).all()
+
+    # pages aka stimulants
+    pages = page.query.filter_by(experiment_idexperiment=exp_id).all()
+
+    # background questions
+    bg_questions = background_question.query.filter_by(
+        experiment_idexperiment=exp_id).all()
+
+    # question
+    questions = question.query.filter_by(experiment_idexperiment=exp_id).all()
+
+    # embody questions
+    embody_questions = embody_question.query.filter_by(
+        experiment_idexperiment=exp_id).all()
+
+    csv = ''
+
+    # create CSV-header
+    header = 'participant id;'
+    header += ';'.join([str(count) + '. bg_question: ' + q.background_question.strip()
+                        for count, q in enumerate(bg_questions, 1)])
+
+    for idx in range(1, len(pages) + 1):
+        if len(questions) > 0:
+            header += ';' + ';'.join(['page' + str(idx) + '_' + str(count) + '. slider_question: ' +
+                                      question.question.strip() for count, question in enumerate(questions, 1)])
+
+    for idx in range(1, len(pages) + 1):
+        if len(embody_questions) > 0:
+            header += ';' + ';'.join(['page' + str(idx) + '_' + str(count) + '. embody_question: ' +
+                                      question.picture.strip() for count, question in enumerate(embody_questions, 1)])
+
+    csv += header + '\r\n'
+
+    # filter empty answer_sets
+    participants = list(filter(lambda participant: True if int(
+        participant.answer_counter) > 0 else False, participants))
+
+    len_participants = len(participants)
+
+    # We can use a with statement to ensure threads are cleaned up promptly
+    with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
+        # Start the load operations and mark each future with its URL
+        future_to_answer = {
+            executor.submit(generate_answer_row, participant, pages, questions, embody_questions): participant
+            for participant in participants}
+
+        for nth, future in enumerate(concurrent.futures.as_completed(future_to_answer)):
+            # for testing purpose
+            # answer_row = future_to_answer[future]
+            try:
+                emit('progress', {'done': nth, 'from': len_participants})
+                data = future.result()
+                csv += data + '\r\n'
+            except Exception as exc:
+                print('generated an exception: {}'.format(exc))
+                return exc
+
+    return csv
+
+
+def generate_answer_row(participant, pages, questions, embody_questions):
+    # TODO: refactor
+
+    with app.app_context():
+
+        answer_row = ''
+
+        # append user session id
+        answer_row += participant.session + ';'
+
+        # append background question answers
+        bg_answers = background_question_answer.query.filter_by(
+            answer_set_idanswer_set=participant.idanswer_set).all()
+        bg_answers_list = [str(a.answer).strip() for a in bg_answers]
+        answer_row += ';'.join(bg_answers_list) + ';'
+
+        # append slider answers
+        slider_answers = answer.query.filter_by(
+            answer_set_idanswer_set=participant.idanswer_set) \
+            .order_by(answer.page_idpage, answer.question_idquestion) \
+            .all()
+
+        pages_and_questions = {}
+
+        for p in pages:
+            questions_list = [(p.idpage, a.idquestion) for a in questions]
+            pages_and_questions[p.idpage] = questions_list
+
+        _questions = [
+            item for sublist in pages_and_questions.values() for item in sublist]
+
+        answers_list = map_answers_to_questions(slider_answers, _questions)
+
+        # typecast elements to string
+        answers_list = [str(a).strip() for a in answers_list]
+
+        answer_row += ';'.join(answers_list) + \
+            ';' if slider_answers else len(
+                questions) * len(pages) * ';'
+
+        # append embody answers (coordinates)
+        # save embody answers as bitmap images
+        embody_answers = embody_answer.query.filter_by(
+            answer_set_idanswer_set=participant.idanswer_set) \
+            .order_by(embody_answer.page_idpage) \
+            .all()
+
+        pages_and_questions = {}
+
+        for p in pages:
+            questions_list = [(p.idpage, a.idembody) for a in embody_questions]
+            pages_and_questions[p.idpage] = questions_list
+
+        _questions = [
+            item for sublist in pages_and_questions.values() for item in sublist]
+
+        _embody_answers = map_answers_to_questions(embody_answers, _questions)
+
+        answers_list = []
+
+        for answer_data in _embody_answers:
+            if not answer_data:
+                answers_list.append('')
+                continue
+
+            try:
+                coordinates = json.loads(answer_data)
+                em_height = coordinates.get('height', 600) + 2
+                em_width = coordinates.get('width', 200) + 2
+
+                coordinates_to_bitmap = [
+                    [0 for x in range(em_height)] for y in range(em_width)]
+
+                coordinates = list(
+                    zip(coordinates.get('x'), coordinates.get('y')))
+
+                for point in coordinates:
+
+                    try:
+                        # for every brush stroke, increment the pixel
+                        # value for every brush stroke
+                        coordinates_to_bitmap[point[0]][point[1]] += 0.1
+                    except IndexError:
+                        continue
+
+                answers_list.append(json.dumps(coordinates_to_bitmap))
+
+            except ValueError as err:
+                app.logger.error(err)
+
+        answer_row += ';'.join(answers_list) if embody_answers else \
+            len(embody_questions) * len(pages) * ';'
+
+        return answer_row
diff --git a/config.py b/config.py
index 087604733d53112a8fd65554e3163312f828cde4..632122a826069dbc4e2203de761d258a5c66a275 100644
--- a/config.py
+++ b/config.py
@@ -1,13 +1,12 @@
+from decouple import config
 import os
 basedir = os.path.abspath(os.path.dirname(__file__))
-from decouple import config
 
 
 class Config(object):
-    #seret key is set in __ini__.py
-    #SECRET_KEY = os.environ.get('SECRET_KEY') or 'you-will-never-guess'
-
+    # secret key is set in __init__.py
+    #SECRET_KEY = os.environ.get('SECRET_KEY') or 'you-will-never-guess'
 
     LANGUAGES = ['en', 'fi', 'fa', 'el', 'it', 'zh']
 
@@ -19,23 +18,25 @@ class Config(object):
     SQLALCHEMY_TRACK_MODIFICATIONS = False
     """
 
-    #MariaDB mysql database settings
+    # MariaDB mysql database settings
     MYSQL_USER = config('MYSQL_USER')
     MYSQL_PASSWORD = config('MYSQL_PASSWORD')
     MYSQL_SERVER = config('MYSQL_SERVER')
     MYSQL_DB = config('MYSQL_DB')
-
-    SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://'+MYSQL_USER+':'+MYSQL_PASSWORD+'@'+MYSQL_SERVER+'/'+MYSQL_DB+'?charset=utf8mb4'
-
+
+    SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://'+MYSQL_USER+':' + \
+        MYSQL_PASSWORD+'@'+MYSQL_SERVER+'/'+MYSQL_DB+'?charset=utf8mb4'
+
     SQLALCHEMY_TRACK_MODIFICATIONS = False
 
     SQLALCHEMY_ENGINE_OPTIONS = {
-        "pool_pre_ping": True,
-        "pool_recycle": 300,
-        "max_overflow": 30,
-        "pool_size": 20
+        "pool_pre_ping": True,
+        "pool_recycle": 60,
+        "max_overflow": 30,
+        "pool_size": 20
     }
-
+
     TEMPLATES_AUTO_RELOAD = True
+
+    DEBUG = False
diff --git a/embody_plot.py b/embody_plot.py
index 0d61fad96a73dc85a9245318d83f3cfd639edee1..f540d77fafee5a5fbdf446d50d2732bc08b4be4d 100644
--- a/embody_plot.py
+++ b/embody_plot.py
@@ -124,6 +124,8 @@ def timeit(method):
 def get_coordinates(idpage, idembody=None, select_clause=SELECT_BY_PAGE_AND_PICTURE):
     """Select all drawn points from certain stimulus and plot them onto the human body"""
+
+    # init db
     db = MyDB()
     db.query(select_clause, (idpage,idembody))
 
@@ -141,6 +143,9 @@ def get_coordinates(idpage, idembody=None, select_clause=SELECT_BY_PAGE_AND_PICT
     else:
         plt = plot_coordinates(coordinates, DEFAULT_IMAGE_PATH)
 
+    # close db connection
+    db.__del__()
+
     # Save image to ./app/static/
     img_filename = 'PAGE-' + str(idpage) + '-' + DATE_STRING + '.png'
     plt.savefig(STATIC_PATH + img_filename)
@@ -183,6 +188,7 @@ def plot_coordinates(coordinates, image_path=DEFAULT_IMAGE_PATH):
 
     # Total amount of points
     points_count = len(coordinates['coordinates'])
+    step = 1
 
     # Load image to a plot
     image = mpimg.imread(image_path)
@@ -205,21 +211,26 @@ def plot_coordinates(coordinates, image_path=DEFAULT_IMAGE_PATH):
     for idx, point in enumerate(coordinates["coordinates"]):
 
         try:
-            frame[int(point[1]), int(point[0])] = 1
+            frame[int(point[1]), int(point[0])] = 1
         except IndexError as err:
-            app.logger.info(err)
-
-        point = ndimage.gaussian_filter(frame, sigma=5)
-        ax2.imshow(point, cmap='hot', interpolation='none')
+            app.logger.info(err)
 
         # Try to send progress information to socket.io
-        try:
-            emit('progress', {'done':idx+1/points_count, 'from':points_count})
-            socketio.sleep(0)
-        except RuntimeError as err:
-            print(err)
+        if idx == 0: continue
+        if round((idx / points_count) * 100) % (step * 5) == 0:
+            try:
+                emit('progress', {'done': step * 5, 'from': 100})
+                socketio.sleep(0.05)
+            except RuntimeError:
+                continue
+
+            step += 1
+
+    point = ndimage.gaussian_filter(frame, sigma=5)
+    ax2.imshow(point, cmap='hot', interpolation='none')
+
     image_mask = mpimg.imread(IMAGE_PATH_MASK)
     ax2.imshow(image_mask)
 
@@ -234,7 +245,7 @@ def plot_coordinates(coordinates, image_path=DEFAULT_IMAGE_PATH):
     ax1.plot(coordinates["x"],coordinates["y"], 'ro', alpha=0.2)
     ax1.imshow(image, alpha=0.6)
 
-    app.logger.info("iamge plotted")
+    app.logger.info("image plotted")
 
     # return figure for saving/etc...
     return fig
diff --git a/requirements.txt b/requirements.txt
index dc5de6c46671b624a8d5237027838bd05175ab99..c44df47212d514b6e1462704d61863c0561e066b 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -13,7 +13,8 @@ Flask-Cors==3.0.7
 Flask-Login==0.4.1
 Flask-Migrate==2.2.1
 Flask-Session==0.3.1
-Flask-SocketIO==3.3.2
+Flask-SocketIO==4.3.0
+# Flask-SocketIO==3.3.2
 Flask-SQLAlchemy==2.3.2
 Flask-Uploads==0.2.1
 Flask-WTF==0.14.2
@@ -40,8 +41,10 @@ PyMySQL==0.9.3
 pyparsing==2.3.1
 python-dateutil==2.7.3
 python-editor==1.0.3
-python-engineio==3.5.1
-python-socketio==3.1.2
+python-engineio==3.13.0
+# python-engineio==3.5.1
+# python-socketio==3.1.2
+python-socketio==4.6.0
 pytz==2018.7
 rope==0.12.0
 scipy==1.2.1
@@ -54,4 +57,4 @@ visitor==0.1.3
 Werkzeug==0.14.1
 WTForms==2.2.1
 WTForms-SQLAlchemy==0.1
-python-decouple
\ No newline at end of file
+python-decouple
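
Not part of the patch: below is a minimal sketch of how the new `/download_csv` socket flow could be exercised end to end with Flask-SocketIO's test client. It assumes the layout used in this diff (`app/__init__.py` exposing both `app` and `socketio`), a configured database, and a hypothetical experiment id of 1; the event names (`generate_csv`, `file_ready`, `timeout`) and the `path` query parameter match the handlers added in `app/experiment/views.py`.

```python
# Hypothetical smoke test (not included in this PR); assumes `app` and
# `socketio` are importable as configured in app/__init__.py and that
# experiment 1 exists in the database.
from flask import url_for

from app import app, socketio


def test_csv_export_flow(exp_id=1):  # exp_id is a placeholder value
    # Connecting fires the 'connect' handler of the /download_csv namespace.
    client = socketio.test_client(app, namespace='/download_csv')

    # Same event getCSV.js emits from the browser.
    client.emit('generate_csv', {'exp_id': exp_id}, namespace='/download_csv')

    received = client.get_received('/download_csv')
    events = {message['name']: message['args'] for message in received}

    if 'timeout' in events:
        # generate_csv() returned an exception; its text is forwarded to the client.
        raise AssertionError(events['timeout'][0]['exc'])

    # 'file_ready' carries only the temp-file basename; the HTTP route
    # prepends /tmp/ before calling send_file().
    payload = events['file_ready'][0]
    with app.test_request_context():
        download_url = url_for('experiment.download_csv',
                               exp_id=exp_id, path=payload['path'])

    response = app.test_client().get(download_url)
    assert response.status_code == 200
    assert response.mimetype == 'text/csv'
```

This mirrors the two-step contract the browser client relies on: the socket connection only reports progress and hands back a temp-file name, while the actual download remains an ordinary GET against the blueprint route.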