From 793b887d9703d76c0e10db444244ffc701c9d53d Mon Sep 17 00:00:00 2001 From: kimonoki Date: Tue, 15 May 2018 11:37:13 +1000 Subject: [PATCH 01/53] Init Commit. New Adapter for the viewer and html template files. --- tvb/adapters/visualizers/__init__.py | 2 +- tvb/adapters/visualizers/new_dual_viewer.py | 121 ++++++++++++++++++ .../visualizers/new_dual_brain/__init__.py | 0 .../visualizers/new_dual_brain/controls.html | 0 .../visualizers/new_dual_brain/preview.html | 0 .../visualizers/new_dual_brain/view.html | 4 + 6 files changed, 126 insertions(+), 1 deletion(-) create mode 100644 tvb/adapters/visualizers/new_dual_viewer.py create mode 100644 tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/__init__.py create mode 100644 tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html create mode 100644 tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/preview.html create mode 100644 tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/view.html diff --git a/tvb/adapters/visualizers/__init__.py b/tvb/adapters/visualizers/__init__.py index dd23bbdc4..74dcaaa72 100644 --- a/tvb/adapters/visualizers/__init__.py +++ b/tvb/adapters/visualizers/__init__.py @@ -35,6 +35,6 @@ __all__ = ["annotations_viewer", "brain", "complex_imaginary_coherence", "connectivity", "connectivity_edge_bundle", "covariance", "cross_coherence", "cross_correlation", "eeg_monitor", "fourier_spectrum", - "histogram", "ica", "local_connectivity_view", "matrix_viewer", "pca", "pearson_cross_correlation", + "histogram", "ica", "local_connectivity_view", "matrix_viewer","new_dual_viewer","pca", "pearson_cross_correlation", "pearson_edge_bundle", "pse_discrete", "pse_isocline", "region_volume_mapping", "sensors", "surface_view", "time_series", "time_series_volume", "tract", "topographic", "wavelet_spectrogram"] diff --git a/tvb/adapters/visualizers/new_dual_viewer.py b/tvb/adapters/visualizers/new_dual_viewer.py new file mode 100644 index 
000000000..e91bf93c0 --- /dev/null +++ b/tvb/adapters/visualizers/new_dual_viewer.py @@ -0,0 +1,121 @@ +# -*- coding: utf-8 -*- +# +# +# TheVirtualBrain-Framework Package. This package holds all Data Management, and +# Web-UI helpful to run brain-simulations. To use it, you also need do download +# TheVirtualBrain-Scientific Package (for simulators). See content of the +# documentation-folder for more details. See also http://www.thevirtualbrain.org +# +# (c) 2012-2017, Baycrest Centre for Geriatric Care ("Baycrest") and others +# +# This program is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software Foundation, +# either version 3 of the License, or (at your option) any later version. +# This program is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A +# PARTICULAR PURPOSE. See the GNU General Public License for more details. +# You should have received a copy of the GNU General Public License along with this +# program. If not, see . +# +# +# CITATION: +# When using The Virtual Brain for scientific publications, please cite it as follows: +# +# Paula Sanz Leon, Stuart A. Knock, M. Marmaduke Woodman, Lia Domide, +# Jochen Mersmann, Anthony R. McIntosh, Viktor Jirsa (2013) +# The Virtual Brain: a simulator of primate brain network dynamics. +# Frontiers in Neuroinformatics (7:10. 
doi: 10.3389/fninf.2013.00010) +# +# + + +from tvb.adapters.visualizers.brain import BrainViewer +from tvb.adapters.visualizers.eeg_monitor import EegMonitor +from tvb.adapters.visualizers.sensors import prepare_sensors_as_measure_points_params +from tvb.adapters.visualizers.sensors import prepare_mapped_sensors_as_measure_points_params +from tvb.basic.filters.chain import FilterChain +from tvb.core.entities.storage import dao +from tvb.datatypes.surfaces import EEGCap, CorticalSurface +from tvb.datatypes.surfaces import Surface +from tvb.datatypes.time_series import TimeSeries, TimeSeriesSEEG, TimeSeriesEEG, TimeSeriesRegion + + +class NewDualViewer(BrainViewer): + """ + New visualizer merging Brain 3D display and EEG lines display. + Same input as the DualBrainViewer + """ + _ui_name = "New Viewer for Time Series in 3D and 2D" + _ui_subsection = "new_brain_dual" + + def get_input_tree(self): + + return [{'name': 'time_series', 'label': 'Time Series', 'type': TimeSeries, 'required': True, + 'conditions': FilterChain(fields=[FilterChain.datatype + '.type', + FilterChain.datatype + '._has_surface_mapping'], + operations=["in", "=="], + values=[['TimeSeriesEEG', 'TimeSeriesSEEG', + 'TimeSeriesMEG', 'TimeSeriesRegion'], True])}, + + {'name': 'projection_surface', 'label': 'Projection Surface', 'type': Surface, 'required': False, + 'description': 'A surface on which to project the results. When missing, the first EEGCap is taken' + 'This parameter is ignored when InternalSensors measures.'}, + + {'name': 'shell_surface', 'label': 'Shell Surface', 'type': Surface, 'required': False, + 'description': "Wrapping surface over the internal sensors, to be displayed " + "semi-transparently, for visual purposes only."}] + + def populate_surface_fields(self, time_series): + """ + Prepares the urls from which the client may read the data needed for drawing the surface. 
+ """ + + if isinstance(time_series, TimeSeriesRegion): + BrainViewer.populate_surface_fields(self, time_series) + return + + self.one_to_one_map = False + self.region_map = None + self.connectivity = None + + if self.surface is None: + eeg_cap = dao.get_generic_entity(EEGCap, "EEGCap", "type") + if len(eeg_cap) < 1: + raise Exception("No EEG Cap Surface found for display!") + self.surface = eeg_cap[0] + + def retrieve_measure_points_prams(self, time_series): + + if isinstance(time_series, TimeSeriesRegion): + return BrainViewer.retrieve_measure_points_prams(self, time_series) + + self.measure_points_no = time_series.sensors.number_of_sensors + + if isinstance(time_series, TimeSeriesEEG): + return prepare_mapped_sensors_as_measure_points_params(self.current_project_id, + time_series.sensors, self.surface) + + return prepare_sensors_as_measure_points_params(time_series.sensors) + + def launch(self, time_series, projection_surface=None, shell_surface=None): + + self.surface = projection_surface + + if isinstance(time_series, TimeSeriesSEEG) and shell_surface is None: + shell_surface = dao.try_load_last_entity_of_type(self.current_project_id, CorticalSurface) + + params = BrainViewer.compute_parameters(self, time_series, shell_surface) + params.update(EegMonitor().compute_parameters(time_series, is_extended_view=True)) + + params['isOneToOneMapping'] = False + params['brainViewerTemplate'] = 'view.html' + + if isinstance(time_series, TimeSeriesSEEG): + params['brainViewerTemplate'] = "internal_view.html" + # Mark as None since we only display shelf face and no point to load these as well + params['urlVertices'] = None + params['isSEEG'] = True + + return self.build_display_result("new_dual_brain/view", params, + pages=dict(controlPage="brain/extendedcontrols", + channelsPage="commons/channel_selector.html")) diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/__init__.py 
b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html new file mode 100644 index 000000000..e69de29bb diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/preview.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/preview.html new file mode 100644 index 000000000..e69de29bb diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/view.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/view.html new file mode 100644 index 000000000..dd8cf3e67 --- /dev/null +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/view.html @@ -0,0 +1,4 @@ +
+ + +
\ No newline at end of file From 3d0fe1c2442b1556a5c813a541cc69b6326b13f5 Mon Sep 17 00:00:00 2001 From: kimonoki Date: Tue, 15 May 2018 13:08:45 +1000 Subject: [PATCH 02/53] TVB-2367 Add content in the controls panel --- tvb/adapters/visualizers/new_dual_viewer.py | 2 +- .../visualizers/new_dual_brain/controls.html | 103 ++++++++++++++++++ 2 files changed, 104 insertions(+), 1 deletion(-) diff --git a/tvb/adapters/visualizers/new_dual_viewer.py b/tvb/adapters/visualizers/new_dual_viewer.py index e91bf93c0..fd6d8ef2a 100644 --- a/tvb/adapters/visualizers/new_dual_viewer.py +++ b/tvb/adapters/visualizers/new_dual_viewer.py @@ -117,5 +117,5 @@ def launch(self, time_series, projection_surface=None, shell_surface=None): params['isSEEG'] = True return self.build_display_result("new_dual_brain/view", params, - pages=dict(controlPage="brain/extendedcontrols", + pages=dict(controlPage="new_dual_brain/controls", channelsPage="commons/channel_selector.html")) diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html index e69de29bb..52de77b32 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html @@ -0,0 +1,103 @@ +
+ + + +
+ +
+
+
+ +
+
+
+
+
+ + 0 + timesteps per Frame. +
+
+ + + + + + + +
+
+ +
+
+
+
+ +
+ + + + + +
+ +
+ +
+ + 3 +
+
+
+
+
+
+
+ +
+ + 1 +
+
+
+
+
+
+ +
+ + 1 +
+
+
+
+
+
+ +
+ + + ${min(number_of_visible_points, longestChannelLength)} + +
+
+ + All points already displayed! + + + + + A change here will trigger Graph redrawing from the first step! + +
+
+
+ + +
\ No newline at end of file From 821be6827ef1cf7f293d0438831b51a592dda016 Mon Sep 17 00:00:00 2001 From: kimonoki Date: Tue, 15 May 2018 14:49:14 +1000 Subject: [PATCH 03/53] TVB-2367 Add 3D view's template --- tvb/adapters/visualizers/new_dual_viewer.py | 1 - .../new_dual_brain/dual_brain_3d_view.html | 24 +++++++++++++++++++ .../visualizers/new_dual_brain/view.html | 8 +++++++ 3 files changed, 32 insertions(+), 1 deletion(-) create mode 100644 tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_view.html diff --git a/tvb/adapters/visualizers/new_dual_viewer.py b/tvb/adapters/visualizers/new_dual_viewer.py index fd6d8ef2a..739979d06 100644 --- a/tvb/adapters/visualizers/new_dual_viewer.py +++ b/tvb/adapters/visualizers/new_dual_viewer.py @@ -111,7 +111,6 @@ def launch(self, time_series, projection_surface=None, shell_surface=None): params['brainViewerTemplate'] = 'view.html' if isinstance(time_series, TimeSeriesSEEG): - params['brainViewerTemplate'] = "internal_view.html" # Mark as None since we only display shelf face and no point to load these as well params['urlVertices'] = None params['isSEEG'] = True diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_view.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_view.html new file mode 100644 index 000000000..1ea30ceb7 --- /dev/null +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_view.html @@ -0,0 +1,24 @@ +
+ + + + + + + + + + + + +
diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/view.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/view.html index dd8cf3e67..631c99a67 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/view.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/view.html @@ -1,4 +1,12 @@
+
+ +
+ +
+ +
+
\ No newline at end of file From 9de5985c3fcf2de3401e1be738eba1c3ed3f1cd2 Mon Sep 17 00:00:00 2001 From: kimonoki Date: Tue, 15 May 2018 22:21:03 +1000 Subject: [PATCH 04/53] TVB-2367 Merge the js files into the new viewer Add internal template for the 3D brain viewer Merge the js files for the egg viewer into a single js file --- tvb/adapters/visualizers/new_dual_viewer.py | 3 +- .../new_dual_brain/dual_brain_2d_view.html | 71 + .../dual_brain_3d_internal_view.html | 25 + .../new_dual_brain/dual_brain_3d_view.html | 25 +- .../new_dual_brain/scripts/dualBrainViewer.js | 1235 +++++++++++++++++ .../visualizers/new_dual_brain/view.html | 22 +- 6 files changed, 1365 insertions(+), 16 deletions(-) create mode 100644 tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html create mode 100644 tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_internal_view.html create mode 100644 tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js diff --git a/tvb/adapters/visualizers/new_dual_viewer.py b/tvb/adapters/visualizers/new_dual_viewer.py index 739979d06..8fd6a9134 100644 --- a/tvb/adapters/visualizers/new_dual_viewer.py +++ b/tvb/adapters/visualizers/new_dual_viewer.py @@ -108,9 +108,10 @@ def launch(self, time_series, projection_surface=None, shell_surface=None): params.update(EegMonitor().compute_parameters(time_series, is_extended_view=True)) params['isOneToOneMapping'] = False - params['brainViewerTemplate'] = 'view.html' + params['brainViewerTemplate'] = 'dual_brain_3d_view.html' if isinstance(time_series, TimeSeriesSEEG): + params['brainViewerTemplate'] = "dual_brain_3d_internal_view.html" # Mark as None since we only display shelf face and no point to load these as well params['urlVertices'] = None params['isSEEG'] = True diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html 
b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html new file mode 100644 index 000000000..27c4b18b2 --- /dev/null +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html @@ -0,0 +1,71 @@ +
+ + + + + + + + + + +
+ +
    + +
  • + +
  • + + +
  • + +
  • + +
  • + +
  • + + +
  • + Channel + 0 +
  • + +
  • + Time + 0 +
  • + +
  • + Value + 0 +
  • +
    + + + + + +
  • + + ${drawTimeseriesSelectorButton( + name, tsStateVars[name], tsModes[name], + groupedLabels[idx], initialSelection[idx], + containerId="channelSelector" + str(idx), + buttonTitle="Select signals from Input %d" % (idx+1))} +
  • +
+ + + + + +
+
\ No newline at end of file diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_internal_view.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_internal_view.html new file mode 100644 index 000000000..978302e7b --- /dev/null +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_internal_view.html @@ -0,0 +1,25 @@ +
+ + + + + + + + + + +
test
+ + +
\ No newline at end of file diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_view.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_view.html index 1ea30ceb7..a216f2cf7 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_view.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_view.html @@ -1,24 +1,21 @@ -
+
- - - - + - -
+
\ No newline at end of file diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js new file mode 100644 index 000000000..c8069bd25 --- /dev/null +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js @@ -0,0 +1,1235 @@ +/** + * TheVirtualBrain-Framework Package. This package holds all Data Management, and + * Web-UI helpful to run brain-simulations. To use it, you also need do download + * TheVirtualBrain-Scientific Package (for simulators). See content of the + * documentation-folder for more details. See also http://www.thevirtualbrain.org + * + * (c) 2012-2017, Baycrest Centre for Geriatric Care ("Baycrest") and others + * + * This program is free software: you can redistribute it and/or modify it under the + * terms of the GNU General Public License as published by the Free Software Foundation, + * either version 3 of the License, or (at your option) any later version. + * This program is distributed in the hope that it will be useful, but WITHOUT ANY + * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A + * PARTICULAR PURPOSE. See the GNU General Public License for more details. + * You should have received a copy of the GNU General Public License along with this + * program. If not, see . 
+ * + **/ + +/* globals doAjaxCall, readDataPageURL, HLPR_readJSONfromFile */ + +// //it contains all the points that have to be/have been displayed (it contains all the points from the read file); +// //it is an array of arrays (each array contains the points for a certain line chart) +var AG_allPoints = []; +// it supplies the labels for x axis (time in milliseconds) +var AG_time = []; +//it is used for clearing timing events (the event that calls the drawGraph method after a specified time-interval) +var t = null; +//how many elements will be visible on the screen +//computed on the server +var AG_numberOfVisiblePoints = 0; +//all the points that are visible on the screen at a certain moment; the points are read from the AG_allPoints array +//and are translated with a value equal to [AG_translationStep * (AG_noOfLines - the index of the current line)] +//THE FORM of this matrix is: [ [[t1, a1], [t2, a2], ...], [[t1, b1], [t2, b2], ...], ..., [[t1, n1], [t2, n2], ...]] +// t1, t2, ... - represents time that is visible on the screen at a certain moment; +// a1, a2,... - represents the translated values +var AG_displayedPoints = []; +//All the times values that are displayed at a certain moment. To be used by the vertical time line. +var AG_displayedTimes = []; +//the last element that was displayed on the screen is located at this index; the index refers to AG_allPoints array +var AG_currentIndex = 0; +//this var should be set to the length of the AG_allPoints array +var AG_noOfLines = 0; +// the step used for translating the drawn line charts; we translate the drawn line charts because we don't want them to overlap +// the lines will be translated with AG_translationStep * AG_computedStep +var AG_translationStep = 1; +// a scaling factor for the displayed signal +var AG_scaling = 1; +// this var is computed on the server. It is used for line translation (AG_translationStep * AG_computedStep). 
+var AG_computedStep = 50; +//The normalization steps for each of the channels, in order to bring them centered near the channel bar +var AG_normalizationSteps = []; +//If the animation is paused using pause/start button +var AG_isStopped = true; +//If animation speed is set at a 0 value +var AG_isSpeedZero = false; +//the number of points that are shifted/unshift at a moment +var noOfShiftedPoints = 1; +// List of channels that will be submited on a change of the displayed channels +var AG_submitableSelectedChannels = []; +// contains the indexes of the channels that are displayed +var displayedChannels = []; +// a list of urls pointing to the files from where we should read the time +var timeSetUrls = []; +//a list containing the number of channel in each file specified in 'dataSetUrls' fields +var noOfChannelsPerSet = []; +// the number of points from the longest channel +var maxChannelLength = 0; +// the maximum number of data files from all the submited datatypes +var maxDataFileIndex = 0; +// represents the file index from the dataset that is displayed in the chart +var currentDataFileIndex = 0; +// contains the parsed data for the next file from the dataset +var nextData = []; +// contains the time for the next file from the dataset +var nextTimeData = []; +// true only if the next file from dataset was loaded into memory +var isNextDataLoaded = false; +// true only if the next time data was loaded into memory +var isNextTimeDataLoaded = false; +// true only if the the process of loading a file is started +var AG_isLoadStarted = false; +// this is the number of steps left before updating the next file +var threshold = 10; +// the amount of data that has passed +var totalPassedData = 0; +// the number of channels +var totalNumberOfChannels = 0; +// true only if any of the displayed channels contains NaN values +var nanValueFound = false; +//Channel prefix for each array of data +var channelPrefix = "Channel: "; +// +var totalTimeLength = 0; +//Default values 
for the x and y axis of the plot +//NOTE: do not remove from the axis AG_options 'labelWidth' and 'labelHeight' because +//this will slow down the animation +var lbl_x_width = 100; +var lbl_x_height = 30; +var zoom_range = [0.1, 20]; + +var AG_defaultXaxis = {zoomRange: zoom_range, labelWidth: lbl_x_width, labelHeight: lbl_x_height}; +var AG_defaultYaxis = {show: false, zoomRange: zoom_range, labelWidth: 200, labelHeight: 30}; + +// the index of the cached file (the file that was loaded asynchronous) +var cachedFileIndex = 0; +var labelX = ""; +var chartTitle = ""; +//The displayed labels for the graph +var chanDisplayLabels = []; +// setup plot +var AG_options = { + series: { + shadowSize: 0, + color: 'blue' + }, // drawing is faster without shadows + lines: { + lineWidth: 1, + show: true + }, + yaxis: AG_defaultYaxis, + xaxis: AG_defaultXaxis, + grid: { + backgroundColor: 'white', + hoverable: true, + clickable: true + }, + points: { + show: false, + radius: 0.001 + }, + zoom: { + interactive: false + }, + selection: { + mode: "xy" + }, + legend: { + show: false + }, + hooks: { + processRawData: [processRawDataHook] + } +}; + +var DEFAULT_MAX_CHANNELS = 10; +var plot = null; + +var followingLine = []; +//The required position from which the following vertical time line will start moving with the array +//Expressed as a number from [0, 1], 0 - start from begining, 1 start only at end +var procentualLinePosition = 0.5; +//The actual position in the graph of the following vertical line. Start from -speed to account for the initial translation. +var currentLinePosition = 0; +//The number of points used to display the vertical line. 
+var numberOfPointsForVerticalLine = 1000; +var isDoubleView = false; + +var AG_homeViewYValues = []; +var AG_homeViewXValues = {zoomRange: zoom_range, labelWidth: lbl_x_width, labelHeight: lbl_x_height}; +//This will be set to true in the launch_viewer method called by burst small previews +var isSmallPreview = false; + +var targetVerticalLinePosition; + +// The base url for calling any methods on a given datatype +var baseDataURLS = []; +var nrOfPagesSet = []; +var dataPageSize = []; +var tsModes = [0, 0, 0]; +var tsStates = [0, 0, 0]; +var longestChannelIndex = 0; + +// region selection component +var AG_regionSelector = null; +// State mode selector. Used as a global only in dual view +var AG_modeSelector = null; + +function resizeToFillParent() { + const canvas = $('#EEGcanvasDiv'); + let container, width, height; + + if (!isSmallPreview) { + // Just use parent section width and height. For width remove some space for the labels to avoid scrolls + // For height we have the toolbar there. Using 100% does not seem to work properly with FLOT. 
+ container = canvas.parent(); + width = container.width() - 40; + height = container.height() - 80; + } else { + container = $('body'); + width = container.width() - 40; + height = container.height() - 20; + } + canvas.width(width).height(height); +} + +window.onresize = function () { + resizeToFillParent(); + // redrawPlot(plot.getData()); +}; + +/** + * Animated graph entry point + */ +function AG_startAnimatedChart(ag_settings) { + isSmallPreview = false; + _AG_initGlobals(ag_settings); + _AG_initPaginationState(ag_settings.number_of_visible_points); + _AG_preStart(); + drawSliderForScale(); + drawSliderForAnimationSpeed(); + _AG_init_selection(ag_settings.measurePointsSelectionGIDs); + + bindHoverEvent(); + initializeCanvasEvents(); + if (!ag_settings.extended_view) { + bindZoomEvent(); + } +} + +function AG_startAnimatedChartPreview(ag_settings) { + isSmallPreview = true; + AG_isStopped = true; + _AG_initGlobals(ag_settings); + _AG_initPaginationState(ag_settings.number_of_visible_points); + _AG_preStart(); + + // Initialize AG_submitableSelectedChannels + // warning: Assumes channel values are a range + if (AG_submitableSelectedChannels.length === 0) { + // Viewer breaks if this is empty. Fill the first few channels + const defaultSelectionLength = Math.min(totalNumberOfChannels, DEFAULT_MAX_CHANNELS); + for (let i = 0; i < defaultSelectionLength; i++) { + AG_submitableSelectedChannels.push(i); + } + } + + refreshChannels(); +} + +function AG_rePaginate(number_of_visible_points) { + _AG_initPaginationState(number_of_visible_points); + $('#display-page-size').html('' + number_of_visible_points); + refreshChannels(); + if (isDoubleView) { + initActivityData(); + } +} + +/** + * Initialize global state. Part of the AG startup. 
+ * @private + */ +function _AG_initGlobals(ag_settings) { + isDoubleView = ag_settings.extended_view; + // dataSetUrls = $.parseJSON(dataSetPaths); + baseDataURLS = ag_settings.baseURLS; + nrOfPagesSet = ag_settings.nrOfPages; + dataPageSize = ag_settings.pageSize; + chanDisplayLabels = ag_settings.channelLabels; + noOfChannelsPerSet = ag_settings.channelsPerSet; + timeSetUrls = ag_settings.timeSetPaths; + maxChannelLength = parseInt(ag_settings.pageSize); + AG_normalizationSteps = ag_settings.normalizedSteps; + setMaxDataFileIndex(nrOfPagesSet); + totalNumberOfChannels = ag_settings.noOfChannels; + totalTimeLength = ag_settings.totalLength; + nanValueFound = ag_settings.nan_value_found; + AG_computedStep = ag_settings.translationStep; +} + +/** + * Initialize pagination. Part of AG startup. + * @private + */ +function _AG_initPaginationState(number_of_visible_points) { + AG_numberOfVisiblePoints = parseInt(number_of_visible_points); + if (AG_numberOfVisiblePoints > maxChannelLength) { + AG_numberOfVisiblePoints = maxChannelLength; + } + targetVerticalLinePosition = AG_numberOfVisiblePoints * procentualLinePosition; +} + +/** + * Misc common startup logic. Part of AG startup + * @private + */ +function _AG_preStart() { + resizeToFillParent(); +} + +/** + * Creates a selection component for each time series displayed by this eeg view + * Part of AG startup + * The order of the filterGids determines the order of the selectors + * It must have the same ordering as all other timeseries arrays + * @private + */ +function _AG_init_selection(filterGids) { + let i; + let selectors = []; + + /** + * Returns the selected channel indices as interpreted by AG_submitableSelectedChannels + * ( starting at 0 and ending at len(timeseries_0_channels) + ... 
+ len(timeseries_final_channels) ) + */ + function getSelectedChannelsAsGlobalIndices() { + let all_selected = []; + let offset = 0; + + for (let i = 0; i < selectors.length; i++) { + const selector = selectors[i]; + const selected_in_current = selector.val(); + + for (let j = 0; j < selected_in_current.length; j++) { + all_selected.push(offset + parseInt(selected_in_current[j], 10)); + } + offset += selector._allValues.length; + } + return all_selected; + } + + // init selectors + let selectorId, selector; + + for (i = 0; i < filterGids.length; i++) { + selectorId = "#channelSelector" + i; + selector = TVBUI.regionSelector(selectorId, {filterGid: filterGids[i]}); + selector.change(function (current_selection) { + AG_submitableSelectedChannels = getSelectedChannelsAsGlobalIndices(); + refreshChannels(); + }); + selectors.push(selector); + } + // the first selector is special. we select by default some channels in it and in case of a dual view + // his selection is synchronized with the brain + AG_regionSelector = selectors[0]; + + // Initialize AG_submitableSelectedChannels + AG_submitableSelectedChannels = getSelectedChannelsAsGlobalIndices(); + + if (AG_submitableSelectedChannels.length === 0) { + // Viewer breaks if this is empty. Fill the first few channels + const defaultSelectionLength = Math.min(totalNumberOfChannels, DEFAULT_MAX_CHANNELS); + // we take the values form the dom, a range(defaultSelectionLength) is not a valid selection if there are multiple time series + AG_submitableSelectedChannels = AG_regionSelector._allValues.slice(0, defaultSelectionLength); + AG_regionSelector.val(AG_submitableSelectedChannels); + } + + // Init the mode selection components. 
Assumes that there are part of the selector dom + let modeSelectors = []; + for (i = 0; i < filterGids.length; i++) { + selectorId = "#channelSelector" + i; + selector = TVBUI.modeAndStateSelector(selectorId, i); + selector.modeChanged(_AG_changeMode); + selector.stateVariableChanged(_AG_changeStateVariable); + modeSelectors.push(selector); + } + // The dual view needs to subscribe to this selector; so we save it like AG_regionSelector + AG_modeSelector = modeSelectors[0]; + + refreshChannels(); +} + +/** + * Read speed from the dom + * @param defaultSpeed default speed when there is no speed slider + * @private + */ +function _AG_get_speed(defaultSpeed) { + let speed = defaultSpeed; + if (!isSmallPreview && !isDoubleView) { + speed = $("#ctrl-input-speed").slider("value"); + } + return speed; +} + +/* + * Create FLOT specific options dictionary for the y axis, with correct labels and positioning for + * all channels. Then store these values in 'AG_homeViewYValues' so they can be used in case of a + * 'Home' action in a series of zoom events. 
+ */ +function AG_createYAxisDictionary(nr_channels) { + +} + +function refreshChannels() { + submitSelectedChannels(false); + // drawGraph(false, noOfShiftedPoints); +} + +function _AG_changeMode(tsIndex, val) { + tsModes[tsIndex] = parseInt(val); + refreshChannels(); +} + +function _AG_changeStateVariable(tsIndex, val) { + tsStates[tsIndex] = parseInt(val); + refreshChannels(); +} + +function _AG_getSelectedDataAndLongestChannelIndex(data) { + let offset = 0; + let selectedData = []; + let channelLengths = []; + + for (let i = 0; i < data.length; i++) { + const selectedChannels = getDisplayedChannels(data[i], offset); + offset += data[i].length; + if (selectedChannels.length > 0) { + channelLengths.push(selectedChannels[0].length); + } else { + channelLengths.push(-1); + } + selectedData = selectedData.concat(selectedChannels); + } + const longestChannelIndex = channelLengths.indexOf(Math.max.apply(Math, channelLengths)); + return {selectedData: selectedData, longestChannelIndex: longestChannelIndex} +} + +/* + * Get required data for the channels in AG_submitableSelectedChannels. If none + * exist then just use the previous 'displayedChannels' (or default in case of first run). 
+ */ +function submitSelectedChannels(isEndOfData) { + + AG_currentIndex = AG_numberOfVisiblePoints; + if (AG_submitableSelectedChannels.length === 0) { + AG_submitableSelectedChannels = displayedChannels.slice(); + } + + if (!(isEndOfData && maxDataFileIndex === 0)) { + AG_allPoints = []; + displayedChannels = AG_submitableSelectedChannels.slice(0); + generateChannelColors(displayedChannels.length); + + let results = []; + for (let i = 0; i < nrOfPagesSet.length; i++) { + const dataURL = readDataPageURL(baseDataURLS[i], 0, dataPageSize, tsStates[i], tsModes[i]); + const data = HLPR_readJSONfromFile(dataURL); + results.push(parseData(data, i)); + } + const r = _AG_getSelectedDataAndLongestChannelIndex(results); + AG_allPoints = AG_allPoints.concat(r.selectedData); + longestChannelIndex = r.longestChannelIndex; + + // keep data only for the selected channels + AG_noOfLines = AG_allPoints.length; + } + + AG_displayedPoints = []; + AG_displayedTimes = []; + for (let ii = 0; ii < AG_noOfLines; ii++) { + AG_displayedPoints.push([]); + } + + if (!(isEndOfData && maxDataFileIndex === 0)) { + //read time + readTimeData(0, false); + AG_time = nextTimeData.slice(0); + } + // reset data + nextData = []; + nextTimeData = []; + AG_isLoadStarted = false; + isNextDataLoaded = false; + isNextTimeDataLoaded = false; + currentDataFileIndex = 0; + totalPassedData = 0; + currentLinePosition = 0; + if (nanValueFound) { + displayMessage('The given data contains some NaN values. 
All the NaN values were replaced by zero.', 'warningMessage'); + } + + // draw the first 'AG_numberOfVisiblePoints' points + // redrawCurrentView(); + if (!isSmallPreview) { + AG_translationStep = $('#ctrl-input-spacing').slider("option", "value") / 4; + AG_scaling = $("#ctrl-input-scale").slider("value"); + } else { + AG_translationStep = 1; + } + + AG_createYAxisDictionary(AG_noOfLines); + // redrawPlot([]); + resetToDefaultView(); + if (AG_isStopped) { + // AG_isStopped = false; + // drawGraph(false, noOfShiftedPoints); + AG_isStopped = true; + } else { + // drawGraph(false, noOfShiftedPoints); + } +} + +/** + * This method decides if we are at the beginning or end of the graph, in which case we only need + * to move the vertical line, or in between, where vertical line is not moving, instead arrays are shifted. + */ +function shouldMoveLine(direction, shiftNo) { + shiftNo = shiftNo || 1; + let isEndOfGraph = false; + let isStartOfGraph = false; + if (direction === 1) { + isEndOfGraph = ((totalPassedData + AG_currentIndex + noOfShiftedPoints >= totalTimeLength) && (currentLinePosition < AG_numberOfVisiblePoints + shiftNo)); + isStartOfGraph = (currentLinePosition < targetVerticalLinePosition); + if (AG_displayedTimes[currentLinePosition] > AG_displayedPoints[longestChannelIndex][AG_displayedPoints[longestChannelIndex].length - 1][0]) { + isEndOfGraph = false; + } + } else { + isEndOfGraph = (currentLinePosition > targetVerticalLinePosition); + isStartOfGraph = ((totalPassedData + AG_currentIndex - noOfShiftedPoints < AG_numberOfVisiblePoints) && (currentLinePosition > 0)); + if (AG_displayedTimes[currentLinePosition] <= 0) { + isStartOfGraph = false; + } + } + + return isStartOfGraph || isEndOfGraph; +} + +var isEndOfData = false; +var AG_channelColorsDict = {}; +var AG_reversedChannelColorsDict = {}; + +/* + * Generate different colors for each channel. 
+ */ +function generateChannelColors(nr_of_channels) { + AG_channelColorsDict = {}; + AG_reversedChannelColorsDict = {}; + let step = parseInt(255 / nr_of_channels); + for (let i = 0; i < nr_of_channels; i++) { + const color = "rgb(" + 250 * (i % 2) + "," + (200 - i * step) + "," + 220 * ((i + 1) % 2) + ")"; + AG_channelColorsDict[color] = i; + AG_reversedChannelColorsDict[i] = color; + } +} + +/* + * Get y-axis labels and update colors to correspond to each channel + */ +function setLabelColors() { + const labels = $('.flot-y-axis .tickLabel'); + for (let i = 0; i < labels.length; i++) { + const chan_idx = chanDisplayLabels.indexOf(labels[i].firstChild.textContent); + if (chan_idx >= 0) { + labels[i].style.color = AG_reversedChannelColorsDict[displayedChannels.indexOf(chan_idx)]; + labels[i].style.left = 80 + (i % 2) * 40 + 'px'; + } + } +} + +/* + * This method draw the actual plot. The 'executeShift' parameter decides if a shift is + * to be done, or just use the previous data points. 'shiftNo' decides the number of points + * that will be shifted. + */ +function drawGraph(executeShift, shiftNo) { + +} + +/* + * Do a redraw of the plot. Be sure to keep the resizable margin elements as the plot method seems to destroy them. + */ +function redrawPlot(data) { + // const target = $('#EEGcanvasDiv'); + // const resizerChildren = target.children('.ui-resizable-handle'); + // for (let i = 0; i < resizerChildren.length; i++) { + // target[0].removeChild(resizerChildren[i]); + // } + // plot = $.plot(target, data, $.extend(true, {}, AG_options)); + // for (let j = 0; j < resizerChildren.length; j++) { + // target[0].appendChild(resizerChildren[j]); + // } + // setLabelColors(); +} + + +/** + * This hook will be called before Flot copies and normalizes the raw data for the given + * series. 
If the function fills in datapoints.points with normalized + * points and sets datapoints.pointsize to the size of the points, + * Flot will skip the copying/normalization step for this series. + */ +function processRawDataHook(plot, series, data, datapoints) { + datapoints.format = [ + {x: true, number: true, required: true}, + {y: true, number: true, required: true} + ]; + datapoints.pointsize = 2; + + for (let i = 0; i < data.length; i++) { + datapoints.points.push(data[i][0]); + datapoints.points.push(data[i][1]); + } + + series.xaxis.used = series.yaxis.used = true; +} + + +/** + * Translate the given value. + * We use this method to translate the values for the drawn line charts because we don't want them to overlap. + * + * @param value the value that should be translated. + * @param index the number of AG_translationSteps that should be used for translating the given value. + * @return {number} + */ +function AG_addTranslationStep(value, index) { + return value * AG_scaling - AG_normalizationSteps[displayedChannels[index]] + AG_translationStep * AG_computedStep * index; +} + +function getTimeoutBasedOnSpeed() { + const currentAnimationSpeedValue = _AG_get_speed(40); + if (currentAnimationSpeedValue === 0) { + return 300; + } + const timeout = 10 - Math.abs(currentAnimationSpeedValue); + if (timeout === 9) { + return 3000; + } + if (timeout === 8) { + return 2000; + } + if (timeout === 7) { + return 1000; + } + return timeout * 100 + 25; +} + +/* + * Load the data from a given step and center plot around that step. 
+ */ +function loadEEGChartFromTimeStep(step) { + // Read all data for the page in which the selected step falls into + const chunkForStep = Math.floor(step / dataPageSize); + const dataUrl = readDataPageURL(baseDataURLS[0], chunkForStep * dataPageSize, (chunkForStep + 1) * dataPageSize, tsStates[0], tsModes[0]); + const dataPage = [parseData(HLPR_readJSONfromFile(dataUrl), 0)]; + AG_allPoints = getDisplayedChannels(dataPage[0], 0).slice(0); + AG_time = HLPR_readJSONfromFile(timeSetUrls[0][chunkForStep]).slice(0); + + totalPassedData = chunkForStep * dataPageSize; // New passed data will be all data until the start of this page + currentDataFileIndex = chunkForStep; + AG_displayedPoints = []; + const indexInPage = step % dataPageSize; // This is the index in the current page that step will have + let fromIdx, toIdx; + currentLinePosition = AG_numberOfVisiblePoints / 2; // Assume we are not end or beginning since that will be most of the times + if (indexInPage <= AG_numberOfVisiblePoints / 2) { + if (chunkForStep === 0) { + // We are at the beginning of the graph, line did not reach middle point yet, and we are still displaying the first + // AG_numberOfVisiblePoints values + AG_currentIndex = AG_numberOfVisiblePoints; + currentLinePosition = indexInPage; + prepareDisplayData(0, AG_numberOfVisiblePoints, AG_allPoints, AG_time); + } else { + // We are at an edge case between pages. So in order to have all the + // AG_numberOfVisiblePoints we need to also load the points from before this page + addFromPreviousPage(indexInPage, chunkForStep); + } + } else { + if ((indexInPage >= pageSize - AG_numberOfVisiblePoints / 2) || (nrOfPagesSet[0] === 1 && indexInPage + AG_numberOfVisiblePoints / 2 > AG_time.length)) { + if (chunkForStep >= nrOfPagesSet[0] - 1) { + // We are at the end of the graph. The line is starting to move further right from the middle position. 
We are just + // displaying the last AG_numberOfVisiblePoints from the last page + if (AG_time.length > AG_numberOfVisiblePoints) { + fromIdx = AG_time.length - 1 - AG_numberOfVisiblePoints; + } else { + fromIdx = 0; + } + toIdx = AG_time.length - 1; + AG_currentIndex = toIdx; + currentLinePosition = AG_numberOfVisiblePoints - (AG_time.length - 1 - indexInPage); + prepareDisplayData(fromIdx, toIdx, AG_allPoints, AG_time); + } else { + // We are at an edge case between pages. So in order to have all the + // AG_numberOfVisiblePoints we need to also load the points from after this page + addFromNextPage(indexInPage, chunkForStep); + } + } else { + // We are somewhere in the middle of the graph. + fromIdx = indexInPage - AG_numberOfVisiblePoints / 2; + toIdx = indexInPage + AG_numberOfVisiblePoints / 2; + AG_currentIndex = toIdx; + prepareDisplayData(fromIdx, toIdx, AG_allPoints, AG_time); + } + } + nextData = []; + AG_isLoadStarted = false; + isNextDataLoaded = false; + isNextTimeDataLoaded = false; +} + +/* + * Add all required data to AG_displayedPoints and AG_displayedTimes in order to center + * around indexInPage, if some of the required data is on the previous page. + */ +function addFromPreviousPage(indexInPage, currentPage) { + + const previousPageUrl = readDataPageURL(baseDataURLS[0], (currentPage - 1) * dataPageSize, currentPage * dataPageSize, tsStates[0], tsModes[0]); + let previousData = parseData(HLPR_readJSONfromFile(previousPageUrl), 0); + previousData = getDisplayedChannels(previousData, 0).slice(0); + const previousTimeData = HLPR_readJSONfromFile(timeSetUrls[0][currentPage - 1]); + // Compute which slices we would need from the 'full' two-pages data. 
+ // We only need the difference so as to center indexInPage at AG_numberOfVisiblePoints / 2 + let fromIdx, toIdx; + fromIdx = previousData[0].length - (AG_numberOfVisiblePoints / 2 - indexInPage); // This is from where we need to read from previous data + AG_currentIndex = toIdx = AG_numberOfVisiblePoints - (AG_numberOfVisiblePoints / 2 - indexInPage); // This is where we need to add from the current page + // Just generate displayed point and displayed times now + for (let idx = 0; idx < previousData.length; idx++) { + let idy; + let oneLine = []; + // Push data that is from previous slice + for (idy = fromIdx; idy < previousData[0].length; idy++) { + oneLine.push([previousTimeData[idy], AG_addTranslationStep(previousData[idx][idy], idx)]); + } + // Now the data that is from our current slice + for (idy = 0; idy < toIdx; idy++) { + oneLine.push([AG_time[idy], AG_addTranslationStep(AG_allPoints[idx][idy], idx)]); + } + AG_displayedPoints.push(oneLine); + } + AG_displayedTimes = previousTimeData.slice(fromIdx).concat(AG_time.slice(0, toIdx)); + previousData = null; +} + +/* + * Add all required data to AG_displayedPoints and AG_displayedTimes in order to center + * around indexInPage, if some of the required data is on the next page. 
+ */ +function addFromNextPage(indexInPage, currentPage) { + + const followingPageUrl = readDataPageURL(baseDataURLS[0], (currentPage + 1) * dataPageSize, (currentPage + 2) * dataPageSize, tsStates[0], tsModes[0]); + let followingData = parseData(HLPR_readJSONfromFile(followingPageUrl), 0); + followingData = getDisplayedChannels(followingData, 0).slice(0); + const followingTimeData = HLPR_readJSONfromFile(timeSetUrls[0][currentPage + 1]); + let fromIdx, toIdx; + fromIdx = indexInPage - (AG_numberOfVisiblePoints / 2); // We need to read starting from here from the current page + AG_currentIndex = toIdx = fromIdx + AG_numberOfVisiblePoints - AG_allPoints[0].length; // We need to read up to here from next page + for (let idx = 0; idx < AG_allPoints.length; idx++) { + let idy; + const oneLine = []; + // Push data that is from this slice + for (idy = fromIdx; idy < AG_allPoints[0].length; idy++) { + oneLine.push([AG_time[idy], AG_addTranslationStep(AG_allPoints[idx][idy], idx)]); + } + // Now that that is from next slice + for (idy = 0; idy < toIdx; idy++) { + oneLine.push([followingTimeData[idy], AG_addTranslationStep(followingData[idx][idy], idx)]); + } + AG_displayedPoints.push(oneLine); + } + AG_displayedTimes = AG_time.slice(fromIdx).concat(followingTimeData.slice(0, toIdx)); + // Since next page is already loaded, that becomes the current page + AG_allPoints = followingData; + AG_time = followingTimeData; + totalPassedData = (currentPage + 1) * dataPageSize; + currentDataFileIndex = currentPage + 1; + isNextDataLoaded = true; + isNextTimeDataLoaded = true; +} + +/* + * Just re-populate whole displayedPoints and displayedTimes given a start and end index. 
+ */ +function prepareDisplayData(fromIdx, toIdx, pointsArray, timeArray) { + + for (let idx = 0; idx < pointsArray.length; idx++) { + let oneLine = []; + for (let idy = fromIdx; idy < toIdx; idy++) { + oneLine.push([timeArray[idy], AG_addTranslationStep(pointsArray[idx][idy], idx)]); + } + AG_displayedPoints.push(oneLine); + } + AG_displayedTimes = timeArray.slice(fromIdx, toIdx) +} + +/* + * Read the next data file asyncronously. Also get the corresponding time data file. + */ +function loadNextDataFile() { + AG_isLoadStarted = true; + const nx_idx = getNextDataFileIndex(); + cachedFileIndex = nx_idx; + AG_readFileDataAsynchronous(nrOfPagesSet, noOfChannelsPerSet, nx_idx, maxChannelLength, 0); + readTimeData(nx_idx, true); +} + +function changeCurrentDataFile() { + if (!isNextDataLoaded || !isNextTimeDataLoaded) { + return; + } + + if (cachedFileIndex !== getNextDataFileIndex()) { + AG_isLoadStarted = false; + isNextDataLoaded = false; + isNextTimeDataLoaded = false; + nextData = []; + nextTimeData = []; + return; + } + + const speed = _AG_get_speed(100); + const longestChannelLength = AG_allPoints[longestChannelIndex].length; + + if (speed > 0) { + totalPassedData = totalPassedData + longestChannelLength; + if (longestChannelLength < AG_currentIndex) { + AG_currentIndex = -(longestChannelLength - AG_currentIndex); + } else { + AG_currentIndex = 0; + } + } else if (speed < 0) { + totalPassedData = totalPassedData - longestChannelLength; + if (totalPassedData < 0) { + totalPassedData = 0; + } + } else { + return; + } + + AG_allPoints = nextData.slice(0); + nextData = []; + AG_time = nextTimeData.slice(0); + nextTimeData = []; + currentDataFileIndex = getNextDataFileIndex(); + AG_isLoadStarted = false; + isNextDataLoaded = false; + isNextTimeDataLoaded = false; + + if (speed < 0) { + AG_currentIndex = longestChannelLength + AG_currentIndex; + } +} + +function shouldLoadNextDataFile() { + if (!AG_isLoadStarted && maxDataFileIndex > 0) { + const nextFileIndex = 
getNextDataFileIndex(); + const speed = _AG_get_speed(1); // Assume left to right pass of data + if (currentDataFileIndex !== nextFileIndex) { + if ((speed > 0) && (maxChannelLength - AG_currentIndex < threshold * AG_numberOfVisiblePoints)) { + return true; + } + if ((speed < 0) && (AG_currentIndex - AG_numberOfVisiblePoints < threshold * AG_numberOfVisiblePoints)) { + return true; + } + } + } + return false; +} + +/* + * In case of multiple arrays find out which has the most data files that need + * to be loaded. + */ +function setMaxDataFileIndex(nrOfPagesPerArray) { + let max_ln = 0; + for (let i = 0; i < nrOfPagesPerArray.length; i++) { + if (nrOfPagesPerArray[i] > max_ln) { + max_ln = nrOfPagesPerArray[i]; + } + } + maxDataFileIndex = max_ln - 1; +} + +/* + * Return the index of the next data file that should be loaded. + */ +function getNextDataFileIndex() { + let nextIndex; + const speed = _AG_get_speed(100); + if (speed > 0) { + nextIndex = currentDataFileIndex + 1; + if (nextIndex >= maxDataFileIndex) { + return maxDataFileIndex; + } + } else { + nextIndex = currentDataFileIndex - 1; + if (nextIndex <= 0) { + return 0; + } + } + return nextIndex; +} + +function AG_readFileDataAsynchronous(nrOfPages, noOfChannelsPerSet, currentFileIndex, maxChannelLength, dataSetIndex) { + if (dataSetIndex >= nrOfPages.length) { + isNextDataLoaded = true; + // keep data only for the selected channels + const r = _AG_getSelectedDataAndLongestChannelIndex(nextData); + longestChannelIndex = r.longestChannelIndex; + nextData = r.selectedData; //todo: occasional shape mismatch 3d <- 2d + return; + } + if (nrOfPages[dataSetIndex] - 1 < currentFileIndex && AG_isLoadStarted) { + // todo: assumed that this is computing a padding for smaller signals. 
check if this is really the purpose of this + let j; + let padding = []; + let oneChannel = []; + for (j = 0; j < maxChannelLength; j++) { + oneChannel.push(0); + } + for (j = 0; j < noOfChannelsPerSet[dataSetIndex]; j++) { + padding.push(oneChannel); + } + nextData.push(padding); + + AG_readFileDataAsynchronous(nrOfPages, noOfChannelsPerSet, currentFileIndex, maxChannelLength, dataSetIndex + 1); + } else { + doAjaxCall({ + url: readDataPageURL(baseDataURLS[dataSetIndex], currentFileIndex * dataPageSize, (currentFileIndex + 1) * dataPageSize, tsStates[dataSetIndex], tsModes[dataSetIndex]), + success: function (data) { + if (AG_isLoadStarted) { + data = $.parseJSON(data); + const result = parseData(data, dataSetIndex); + nextData.push(result); + + AG_readFileDataAsynchronous(nrOfPages, noOfChannelsPerSet, currentFileIndex, maxChannelLength, dataSetIndex + 1); + } + } + }); + } +} + +/* + * Data is received from the HLPR_parseJSON as a 500/74 array. We need to transform it + * into an 74/500 one and in the transformation also replace all NaN values. 
+ */ +function parseData(dataArray, dataSetIndex) { + + let result = []; + for (let i = 0; i < noOfChannelsPerSet[dataSetIndex]; i++) { + result.push([]); + } + for (let j = 0; j < dataArray.length; j++) { + for (let k = 0; k < noOfChannelsPerSet[dataSetIndex]; k++) { + let arrElem = dataArray[j][k]; + if (arrElem === 'NaN') { + nanValueFound = true; + arrElem = 0; + } + result[k].push(arrElem); + } + } + return result; +} + +/** + * + * @param fileIndex + * @param asyncRead true only if the file should be read asynchronous + */ +function readTimeData(fileIndex, asyncRead) { + if (timeSetUrls[longestChannelIndex].length <= fileIndex) { + nextTimeData = []; + for (let i = 0; i < maxChannelLength; i++) { + nextTimeData.push(totalPassedData + i); + } + isNextTimeDataLoaded = true; + } else { + if (asyncRead) { + doAjaxCall({ + url: timeSetUrls[longestChannelIndex][fileIndex], + success: function (data) { + nextTimeData = $.parseJSON(data); + isNextTimeDataLoaded = true; + } + }); + } else { + nextTimeData = HLPR_readJSONfromFile(timeSetUrls[longestChannelIndex][fileIndex]); + isNextTimeDataLoaded = true; + } + } +} + +function getArrayFromDataFile(dataFile) { + let fileData = dataFile.replace(/\n/g, " ").replace(/\t/g, " "); + let arrayData = $.trim(fileData).split(" "); + for (let i = 0; i < arrayData.length; i++) { + arrayData[i] = parseFloat(arrayData[i]); + } + return arrayData; +} + +function getDisplayedChannels(listOfAllChannels, offset) { + let selectedData = []; + for (let i = 0; i < displayedChannels.length; i++) { + if (listOfAllChannels[displayedChannels[i] - offset] !== undefined) { + selectedData.push(listOfAllChannels[displayedChannels[i] - offset].slice(0)); + } + } + return selectedData; +} + + + +// below is originally in eeg/graph_events + +/* +* Handle zooming speed and scale related settings for animated graph. 
+**/ +//The zoom stack used for keeping track of zoom events for the 'back' option +var zoomStack = []; +//Previously point when displaying info on mouse hover +var previousPoint = null; + +function initializeCanvasEvents() { + // Prepare functions for Export Canvas as Image + + +} + + +//------------------------------------------------START ZOOM RELATED CODE-------------------------------------------------------- +function bindZoomEvent() { + /* + * On a zoom event, retain the x and y axis values in a stack, for the 'Back' zoom possibility. + */ + $("#EEGcanvasDiv").bind('plotzoom', function (event, plot) { + var axes = plot.getAxes(); + AG_isSpeedZero = true; + }); + + $("#EEGcanvasDiv").bind('plotselected', function (event, ranges) { + zoomStack.push([AG_options['xaxis']['min'], AG_options['xaxis']['max'], AG_options['yaxis']['min'], AG_options['yaxis']['max']]); + AG_options['xaxis'] = { min: ranges.xaxis.from, max: ranges.xaxis.to }; + AG_defaultYaxis['min'] = ranges.yaxis.from; + AG_defaultYaxis['max'] = ranges.yaxis.to; + //AG_options['yaxis'] = { min: ranges.yaxis.from, max: ranges.yaxis.to } + AG_isSpeedZero = true; + // redrawPlot(plot.getData()); + }); +} + +function stopAnimation() { + AG_isStopped = !AG_isStopped; + var btn = $("#ctrl-action-pause"); + if (AG_isStopped) { + btn.html("Start"); + btn.attr("class", "action action-controller-launch"); + } else { + btn.html("Pause"); + btn.attr("class", "action action-controller-pause"); + } + if (!AG_isStopped) { + // drawGraph(true, noOfShiftedPoints); + } +} + +function resetToDefaultView() { + /* + * When resetting to default view, clear all the data from the zoom stack + * and set the home values for x and y values. 
+ */ + AG_options.xaxis = AG_homeViewXValues; + zoomStack = []; + AG_defaultYaxis.min = AG_homeViewYValues[0]; + AG_defaultYaxis.max = AG_homeViewYValues[1]; + // redrawPlot(plot.getData()); + if (!isSmallPreview ) { + if ($("#ctrl-input-speed").slider("option", "value") != 0) { + AG_isSpeedZero = false; + } + } +} + + +function zoomBack() { + /* + * Pop the last entry from the zoom stack and redraw with those option. + */ + if (zoomStack.length > 1) { + var previousValues = zoomStack.pop(); + AG_options['xaxis'] = {min: previousValues[0], max: previousValues[1]}; + AG_defaultYaxis['min'] = previousValues[2]; + AG_defaultYaxis['max'] = previousValues[3]; + // redrawPlot(plot.getData()); + } else { + resetToDefaultView() + } +} + +//------------------------------------------------END ZOOM RELATED CODE-------------------------------------------------------- + +//------------------------------------------------START SCALE RELATED CODE-------------------------------------------------------- +/** + * If we change the AG_translationStep then we have to redraw the current view using the new value of the AG_translationStep + */ +function redrawCurrentView() { + // var diff = AG_currentIndex - AG_numberOfVisiblePoints; + // for (var k = 0; k < AG_numberOfVisiblePoints; k++) { + // AG_displayedTimes[k] = AG_time[k + diff]; + // for (var i = 0; i < AG_noOfLines; i++) { + // AG_displayedPoints[i][k] = [AG_time[k + diff], AG_addTranslationStep(AG_allPoints[i][k + diff], i)]; + // } + // } + // AG_createYAxisDictionary(AG_noOfLines); + // redrawPlot([]); +} + + +function drawSliderForScale() { + function _onchange(){ + /** When scaling, we need to redraw the graph and update the HTML with the new values. 
+ */ + var spacing = $("#ctrl-input-spacing").slider("value") / 4; + var scale = $("#ctrl-input-scale").slider("value"); + + if (spacing >= 0 && AG_currentIndex <= AG_numberOfVisiblePoints) { + AG_currentIndex = AG_numberOfVisiblePoints; + } else if (spacing < 0 && (AG_allPoints[0].length - AG_currentIndex) < AG_numberOfVisiblePoints) { + AG_currentIndex = AG_allPoints[0].length; + } + AG_displayedPoints = []; + for (var i = 0; i < AG_noOfLines; i++) { + AG_displayedPoints.push([]); + } + resetToDefaultView(); + _updateScaleFactor(spacing, scale); + } + + $("#ctrl-input-spacing").slider({ value: 4, min: 0, max: 8, change: _onchange}); + $("#ctrl-input-scale").slider({ value: 1, min: 1, max: 32, change: _onchange}); + + $("#display-spacing").html("" + AG_translationStep + '*' +AG_computedStep.toFixed(2)); + $("#display-scale").html("" + AG_scaling); +} + + +function _updateScaleFactor(spacing, scale) { + AG_translationStep = spacing; + AG_scaling = scale; + $("#display-spacing").html("" + AG_translationStep + '*' +AG_computedStep.toFixed(2)); + $("#display-scale").html("" + AG_scaling); + // redrawCurrentView(); + if (AG_isStopped) { + refreshChart(); + } +} + +//------------------------------------------------END SCALE RELATED CODE-------------------------------------------------------- + +//------------------------------------------------START HOVER RELATED CODE-------------------------------------------------------- + +function bindHoverEvent() { + $("#EEGcanvasDiv").bind("plothover", function (event, pos, item) { + /* + * When hovering over plot, if an item (FLOT point) is hovered over, then find the channel of that point + * by means of using the number of AG_translationStep * AG_computedStep intervals from the first channel. + * Then using this and the apporximate of the time value, get the actual data value from the AG_allPoints array. 
+ */ + if (item) { + var timeValue = pos.x.toFixed(4); + var dataValue = pos.y.toFixed(4); + var rowIndex = AG_channelColorsDict[item.series.color]; + if (rowIndex == undefined) { + $("#info-channel").html(' None'); + $("#info-time").html(" 0"); + $("#info-value").html(" 0"); + $("#tooltip").remove(); + previousPoint = null; + return; + } + var startTime = AG_time.indexOf(AG_displayedPoints[0][0][0]); + dataValue = AG_allPoints[rowIndex][startTime + (parseInt((timeValue - AG_displayedPoints[0][0][0]) / (AG_displayedPoints[0][1][0] - AG_displayedPoints[0][0][0])))]; + $("#info-channel").html(' ' + chanDisplayLabels[displayedChannels[rowIndex]]); + $("#info-time").html(" " + timeValue); + $("#info-value").html(" " + dataValue); + + if (previousPoint != item.dataIndex || dataValue != undefined) { + previousPoint = item.dataIndex; + + $("#tooltip").remove(); + var x = item.datapoint[0].toFixed(2), + y = item.datapoint[1].toFixed(2); + showTooltip(item.pageX, item.pageY, + "Time: " + timeValue + ", Value: " + dataValue); + } + } else { + $("#info-channel").html(' None'); + $("#info-time").html(" 0"); + $("#info-value").html(" 0"); + $("#tooltip").remove(); + previousPoint = null; + } + }); +} + +function showTooltip(x, y, contents) { + /* + * A tooltip to display information about a specific point on 'mouse over'. + */ + $('
' + contents + '
').css( { + position: 'absolute', + display: 'none', + top: y + 5, + left: x + 5, + border: '1px solid #fdd', + padding: '2px', + 'background-color': '#fee', + opacity: 0.80 + }).appendTo("body").fadeIn(200); +} +//------------------------------------------------END HOVER RELATED CODE-------------------------------------------------------- + +//------------------------------------------------START SPEED RELATED CODE-------------------------------------------------------- +/** + * The method should be used when the animated chart is stopped. Draw graph without shifting. + */ +function refreshChart() { + AG_isStopped = false; + // drawGraph(false, noOfShiftedPoints); + AG_isStopped = true; +} + +function drawSliderForAnimationSpeed() { + $("#ctrl-input-speed").slider({ + orientation: 'horizontal', + value: 3, + min: -50, + max: 50, + change: function(event, ui) { + resetToDefaultView(); + updateSpeedFactor(); + } + }); +} + + +function updateSpeedFactor() { + var speed = $("#ctrl-input-speed").slider("option", "value"); + $('#display-speed').html(''+ speed); + AG_isSpeedZero = (speed == 0); +} + +//------------------------------------------------END SPEED RELATED CODE-------------------------------------------------------- diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/view.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/view.html index 631c99a67..995d184f5 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/view.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/view.html @@ -1,12 +1,32 @@
+
- +
+ + +
\ No newline at end of file From 476b33f52813a6876d990290b24629dc1a49ee5c Mon Sep 17 00:00:00 2001 From: kimonoki Date: Wed, 16 May 2018 10:53:34 +1000 Subject: [PATCH 05/53] TVB-2367 Add the section view's title for the new viewer --- tvb/interfaces/web/structure.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/tvb/interfaces/web/structure.py b/tvb/interfaces/web/structure.py index a9002efb1..9d202cedc 100644 --- a/tvb/interfaces/web/structure.py +++ b/tvb/interfaces/web/structure.py @@ -122,6 +122,8 @@ class WebStructure(object): SUB_SECTION_VIEW_23 = "view_wavelet" SUB_SECTION_VIEW_24 = "view_annotations" SUB_SECTION_VIEW_25 = "view_matrix" + SUB_SECTION_VIEW_26 = "view_new_brain_dual" + ### Texts to appear in HTML page headers as section-title. @@ -209,7 +211,8 @@ class WebStructure(object): SUB_SECTION_VIEW_22: "Topography Visualizer", SUB_SECTION_VIEW_23: "Wavelet Visualizer", SUB_SECTION_VIEW_24: "Annotations Visualizer", - SUB_SECTION_VIEW_25: "Matrix Visualizer" + SUB_SECTION_VIEW_25: "Matrix Visualizer", + SUB_SECTION_VIEW_26: "New Brain Dual Activity Visualizer (3D and 2D)" } @@ -242,7 +245,8 @@ class WebStructure(object): SUB_SECTION_VIEW_22: "topographic-visualizer", SUB_SECTION_VIEW_23: "wavelet-spectrogram-visualizer", SUB_SECTION_VIEW_24: "annotations-visualizer", - SUB_SECTION_VIEW_25: "matrix-visualizer" + SUB_SECTION_VIEW_25: "matrix-visualizer", + SUB_SECTION_VIEW_26: "brain_dual" } From 31ca3df830a8cc1fd85910e43bd5efc0e5b51a7c Mon Sep 17 00:00:00 2001 From: kimonoki Date: Wed, 16 May 2018 10:56:20 +1000 Subject: [PATCH 06/53] TVB-2367 Change the online help id to new-brain-dual-visualiser --- tvb/interfaces/web/structure.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tvb/interfaces/web/structure.py b/tvb/interfaces/web/structure.py index 9d202cedc..efd6241bf 100644 --- a/tvb/interfaces/web/structure.py +++ b/tvb/interfaces/web/structure.py @@ -246,7 +246,7 @@ class WebStructure(object): 
SUB_SECTION_VIEW_23: "wavelet-spectrogram-visualizer", SUB_SECTION_VIEW_24: "annotations-visualizer", SUB_SECTION_VIEW_25: "matrix-visualizer", - SUB_SECTION_VIEW_26: "brain_dual" + SUB_SECTION_VIEW_26: "new-brain-dual-visualiser" } From 89ae09bac2473ec56d40793117f347d5d4d4feb8 Mon Sep 17 00:00:00 2001 From: kimonoki Date: Thu, 24 May 2018 13:34:06 +1000 Subject: [PATCH 07/53] TVB-2367 Remove unused controls: spacing and pages --- .../visualizers/new_dual_brain/controls.html | 30 ++----------------- 1 file changed, 2 insertions(+), 28 deletions(-) diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html index 52de77b32..fe7b16e32 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html @@ -56,15 +56,8 @@ -
- - 1 -
-
-
-
-
-
+ +
@@ -76,26 +69,7 @@ -
- - - ${min(number_of_visible_points, longestChannelLength)} - -
-
- - All points already displayed! - - - - A change here will trigger Graph redrawing from the first step! - -
From d2a989f4df5f04ee6bd265b62d8e4df81d60a0b8 Mon Sep 17 00:00:00 2001 From: kimonoki Date: Fri, 25 May 2018 12:20:48 +1000 Subject: [PATCH 08/53] TVB-2368 Implementation of the 2D display - Time Selection syncing the start time with the 3D viewer - Value Inspector - Scrolling Boundaries --- .../visualizers/new_dual_brain/controls.html | 4 +- .../new_dual_brain/dual_brain_2d_view.html | 82 +- .../new_dual_brain/scripts/dualBrainViewer.js | 382 ++------ .../new_dual_brain/scripts/timeseriesD3.js | 886 ++++++++++++++++++ .../visualizers/new_dual_brain/view.html | 7 +- 5 files changed, 1010 insertions(+), 351 deletions(-) create mode 100644 tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html index fe7b16e32..494948790 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html @@ -29,7 +29,7 @@
- +
@@ -57,8 +57,6 @@ - -
1 diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html index 27c4b18b2..13852d1b2 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html @@ -1,71 +1,65 @@
+ + + - + + + + + + -
- -
    - -
  • - -
  • - - -
  • - -
  • - -
  • - -
  • +
    + +
      - -
    • - Channel - 0 -
    • + +
    • + Channel + 0 +
    • -
    • - Time - 0 -
    • +
    • + Time + 0 +
    • -
    • - Value - 0 -
    • - +
    • + Value + 0 +
    • - + - +
    • ${drawTimeseriesSelectorButton( - name, tsStateVars[name], tsModes[name], - groupedLabels[idx], initialSelection[idx], - containerId="channelSelector" + str(idx), - buttonTitle="Select signals from Input %d" % (idx+1))} + name, tsStateVars[name], tsModes[name], + groupedLabels[idx], initialSelection[idx], + containerId="channelSelector" + str(idx), + buttonTitle="Select signals from Input %d" % (idx+1))}
    • -
    - - +
+ +
- -
+ +
\ No newline at end of file diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js index c8069bd25..ade8eeffe 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js @@ -99,27 +99,13 @@ var lbl_x_width = 100; var lbl_x_height = 30; var zoom_range = [0.1, 20]; -var AG_defaultXaxis = {zoomRange: zoom_range, labelWidth: lbl_x_width, labelHeight: lbl_x_height}; -var AG_defaultYaxis = {show: false, zoomRange: zoom_range, labelWidth: 200, labelHeight: 30}; // the index of the cached file (the file that was loaded asynchronous) var cachedFileIndex = 0; -var labelX = ""; -var chartTitle = ""; //The displayed labels for the graph var chanDisplayLabels = []; // setup plot var AG_options = { - series: { - shadowSize: 0, - color: 'blue' - }, // drawing is faster without shadows - lines: { - lineWidth: 1, - show: true - }, - yaxis: AG_defaultYaxis, - xaxis: AG_defaultXaxis, grid: { backgroundColor: 'white', hoverable: true, @@ -137,9 +123,6 @@ var AG_options = { }, legend: { show: false - }, - hooks: { - processRawData: [processRawDataHook] } }; @@ -176,23 +159,10 @@ var AG_regionSelector = null; // State mode selector. Used as a global only in dual view var AG_modeSelector = null; -function resizeToFillParent() { - const canvas = $('#EEGcanvasDiv'); - let container, width, height; - if (!isSmallPreview) { - // Just use parent section width and height. For width remove some space for the labels to avoid scrolls - // For height we have the toolbar there. Using 100% does not seem to work properly with FLOT. 
- container = canvas.parent(); - width = container.width() - 40; - height = container.height() - 80; - } else { - container = $('body'); - width = container.width() - 40; - height = container.height() - 20; - } - canvas.width(width).height(height); -} +// GID for the D3 viewer +var filterGid = null; + window.onresize = function () { resizeToFillParent(); @@ -211,11 +181,7 @@ function AG_startAnimatedChart(ag_settings) { drawSliderForAnimationSpeed(); _AG_init_selection(ag_settings.measurePointsSelectionGIDs); - bindHoverEvent(); - initializeCanvasEvents(); - if (!ag_settings.extended_view) { - bindZoomEvent(); - } + } function AG_startAnimatedChartPreview(ag_settings) { @@ -267,6 +233,7 @@ function _AG_initGlobals(ag_settings) { totalTimeLength = ag_settings.totalLength; nanValueFound = ag_settings.nan_value_found; AG_computedStep = ag_settings.translationStep; + } /** @@ -286,7 +253,7 @@ function _AG_initPaginationState(number_of_visible_points) { * @private */ function _AG_preStart() { - resizeToFillParent(); + // resizeToFillParent(); } /** @@ -300,6 +267,8 @@ function _AG_init_selection(filterGids) { let i; let selectors = []; + filterGid = filterGids; + /** * Returns the selected channel indices as interpreted by AG_submitableSelectedChannels * ( starting at 0 and ending at len(timeseries_0_channels) + ... + len(timeseries_final_channels) ) @@ -375,18 +344,8 @@ function _AG_get_speed(defaultSpeed) { return speed; } -/* - * Create FLOT specific options dictionary for the y axis, with correct labels and positioning for - * all channels. Then store these values in 'AG_homeViewYValues' so they can be used in case of a - * 'Home' action in a series of zoom events. 
- */ -function AG_createYAxisDictionary(nr_channels) { - -} - function refreshChannels() { submitSelectedChannels(false); - // drawGraph(false, noOfShiftedPoints); } function _AG_changeMode(tsIndex, val) { @@ -399,6 +358,12 @@ function _AG_changeStateVariable(tsIndex, val) { refreshChannels(); } + +//this function is used in virtualBrain.js keep it for now +function drawGraph() { + +} + function _AG_getSelectedDataAndLongestChannelIndex(data) { let offset = 0; let selectedData = []; @@ -472,27 +437,73 @@ function submitSelectedChannels(isEndOfData) { displayMessage('The given data contains some NaN values. All the NaN values were replaced by zero.', 'warningMessage'); } - // draw the first 'AG_numberOfVisiblePoints' points - // redrawCurrentView(); - if (!isSmallPreview) { - AG_translationStep = $('#ctrl-input-spacing').slider("option", "value") / 4; - AG_scaling = $("#ctrl-input-scale").slider("value"); - } else { - AG_translationStep = 1; + + //TODO find why it's 1 and don't use hardcoded numbers here + var dataShape = [AG_time.length, 1, AG_submitableSelectedChannels.length, 1]; + var selectedLabels = [] + for (let i = 0; i < AG_submitableSelectedChannels.length; i++) { + selectedLabels.push([chanDisplayLabels[displayedChannels[i]]]); } - AG_createYAxisDictionary(AG_noOfLines); - // redrawPlot([]); - resetToDefaultView(); - if (AG_isStopped) { - // AG_isStopped = false; - // drawGraph(false, noOfShiftedPoints); - AG_isStopped = true; + + ts = tv.plot.time_series(); + ts.baseURL(baseDataURLS[0]).preview(false).mode(0).state_var(0); + ts.shape(dataShape).t0(AG_time[1] / 2).dt(AG_time[1]); + ts.labels(selectedLabels); + ts.channels(AG_submitableSelectedChannels); + + + resizeToFillParent(ts); + $('#time-series-viewer').empty(); + ts(d3.select("#time-series-viewer")); + tsView = ts; + + // This is arbitrarily set to a value. 
To be consistent with tsview we rescale relative to this value + _initial_magic_fcs_amp_scl = tsView.magic_fcs_amp_scl; + + $("#ctrl-input-scale").slider({ + value: 50, min: 0, max: 100, + slide: function (event, target) { + _updateScalingFromSlider(target.value); + } + }); + +} + +var ts = null; + +function resizeToFillParent(ts) { + var container, width, height; + + container = $('#eegSectionId').parent(); + width = container.width(); + + //minus toolbar's height + height = container.height() - 60; + + ts.w(width).h(height); + +} + +function _updateScalingFromSlider(value) { + if (value == null) { + value = $("#ctrl-input-scale").slider("value"); + } + var expo_scale = (value - 50) / 50; // [1 .. -1] + var scale = Math.pow(10, expo_scale * 4); // [1000..-1000] + tsView.magic_fcs_amp_scl = _initial_magic_fcs_amp_scl * scale; + tsView.prepare_data(); + tsView.render_focus(); + + if (scale >= 1) { + $("#display-scale").html("1 * " + scale.toFixed(2)); } else { - // drawGraph(false, noOfShiftedPoints); + $("#display-scale").html("1 / " + (1 / scale).toFixed(2)); } + } + /** * This method decides if we are at the beginning or end of the graph, in which case we only need * to move the vertical line, or in between, where vertical line is not moving, instead arrays are shifted. @@ -536,67 +547,6 @@ function generateChannelColors(nr_of_channels) { } } -/* - * Get y-axis labels and update colors to correspond to each channel - */ -function setLabelColors() { - const labels = $('.flot-y-axis .tickLabel'); - for (let i = 0; i < labels.length; i++) { - const chan_idx = chanDisplayLabels.indexOf(labels[i].firstChild.textContent); - if (chan_idx >= 0) { - labels[i].style.color = AG_reversedChannelColorsDict[displayedChannels.indexOf(chan_idx)]; - labels[i].style.left = 80 + (i % 2) * 40 + 'px'; - } - } -} - -/* - * This method draw the actual plot. The 'executeShift' parameter decides if a shift is - * to be done, or just use the previous data points. 
'shiftNo' decides the number of points - * that will be shifted. - */ -function drawGraph(executeShift, shiftNo) { - -} - -/* - * Do a redraw of the plot. Be sure to keep the resizable margin elements as the plot method seems to destroy them. - */ -function redrawPlot(data) { - // const target = $('#EEGcanvasDiv'); - // const resizerChildren = target.children('.ui-resizable-handle'); - // for (let i = 0; i < resizerChildren.length; i++) { - // target[0].removeChild(resizerChildren[i]); - // } - // plot = $.plot(target, data, $.extend(true, {}, AG_options)); - // for (let j = 0; j < resizerChildren.length; j++) { - // target[0].appendChild(resizerChildren[j]); - // } - // setLabelColors(); -} - - -/** - * This hook will be called before Flot copies and normalizes the raw data for the given - * series. If the function fills in datapoints.points with normalized - * points and sets datapoints.pointsize to the size of the points, - * Flot will skip the copying/normalization step for this series. - */ -function processRawDataHook(plot, series, data, datapoints) { - datapoints.format = [ - {x: true, number: true, required: true}, - {y: true, number: true, required: true} - ]; - datapoints.pointsize = 2; - - for (let i = 0; i < data.length; i++) { - datapoints.points.push(data[i][0]); - datapoints.points.push(data[i][1]); - } - - series.xaxis.used = series.yaxis.used = true; -} - /** * Translate the given value. @@ -992,45 +942,7 @@ function getDisplayedChannels(listOfAllChannels, offset) { } - -// below is originally in eeg/graph_events - -/* -* Handle zooming speed and scale related settings for animated graph. 
-**/ -//The zoom stack used for keeping track of zoom events for the 'back' option -var zoomStack = []; -//Previously point when displaying info on mouse hover -var previousPoint = null; - -function initializeCanvasEvents() { - // Prepare functions for Export Canvas as Image - - -} - - //------------------------------------------------START ZOOM RELATED CODE-------------------------------------------------------- -function bindZoomEvent() { - /* - * On a zoom event, retain the x and y axis values in a stack, for the 'Back' zoom possibility. - */ - $("#EEGcanvasDiv").bind('plotzoom', function (event, plot) { - var axes = plot.getAxes(); - AG_isSpeedZero = true; - }); - - $("#EEGcanvasDiv").bind('plotselected', function (event, ranges) { - zoomStack.push([AG_options['xaxis']['min'], AG_options['xaxis']['max'], AG_options['yaxis']['min'], AG_options['yaxis']['max']]); - AG_options['xaxis'] = { min: ranges.xaxis.from, max: ranges.xaxis.to }; - AG_defaultYaxis['min'] = ranges.yaxis.from; - AG_defaultYaxis['max'] = ranges.yaxis.to; - //AG_options['yaxis'] = { min: ranges.yaxis.from, max: ranges.yaxis.to } - AG_isSpeedZero = true; - // redrawPlot(plot.getData()); - }); -} - function stopAnimation() { AG_isStopped = !AG_isStopped; var btn = $("#ctrl-action-pause"); @@ -1041,65 +953,15 @@ function stopAnimation() { btn.html("Pause"); btn.attr("class", "action action-controller-pause"); } - if (!AG_isStopped) { - // drawGraph(true, noOfShiftedPoints); - } -} - -function resetToDefaultView() { - /* - * When resetting to default view, clear all the data from the zoom stack - * and set the home values for x and y values. 
- */ - AG_options.xaxis = AG_homeViewXValues; - zoomStack = []; - AG_defaultYaxis.min = AG_homeViewYValues[0]; - AG_defaultYaxis.max = AG_homeViewYValues[1]; - // redrawPlot(plot.getData()); - if (!isSmallPreview ) { - if ($("#ctrl-input-speed").slider("option", "value") != 0) { - AG_isSpeedZero = false; - } - } -} - -function zoomBack() { - /* - * Pop the last entry from the zoom stack and redraw with those option. - */ - if (zoomStack.length > 1) { - var previousValues = zoomStack.pop(); - AG_options['xaxis'] = {min: previousValues[0], max: previousValues[1]}; - AG_defaultYaxis['min'] = previousValues[2]; - AG_defaultYaxis['max'] = previousValues[3]; - // redrawPlot(plot.getData()); - } else { - resetToDefaultView() - } } -//------------------------------------------------END ZOOM RELATED CODE-------------------------------------------------------- //------------------------------------------------START SCALE RELATED CODE-------------------------------------------------------- -/** - * If we change the AG_translationStep then we have to redraw the current view using the new value of the AG_translationStep - */ -function redrawCurrentView() { - // var diff = AG_currentIndex - AG_numberOfVisiblePoints; - // for (var k = 0; k < AG_numberOfVisiblePoints; k++) { - // AG_displayedTimes[k] = AG_time[k + diff]; - // for (var i = 0; i < AG_noOfLines; i++) { - // AG_displayedPoints[i][k] = [AG_time[k + diff], AG_addTranslationStep(AG_allPoints[i][k + diff], i)]; - // } - // } - // AG_createYAxisDictionary(AG_noOfLines); - // redrawPlot([]); -} function drawSliderForScale() { - function _onchange(){ + function _onchange() { /** When scaling, we need to redraw the graph and update the HTML with the new values. 
*/ var spacing = $("#ctrl-input-spacing").slider("value") / 4; @@ -1114,103 +976,22 @@ function drawSliderForScale() { for (var i = 0; i < AG_noOfLines; i++) { AG_displayedPoints.push([]); } - resetToDefaultView(); - _updateScaleFactor(spacing, scale); + _updateScaleFactor(scale); } - $("#ctrl-input-spacing").slider({ value: 4, min: 0, max: 8, change: _onchange}); - $("#ctrl-input-scale").slider({ value: 1, min: 1, max: 32, change: _onchange}); + $("#ctrl-input-scale").slider({value: 1, min: 1, max: 32, change: _onchange}); - $("#display-spacing").html("" + AG_translationStep + '*' +AG_computedStep.toFixed(2)); $("#display-scale").html("" + AG_scaling); } - -function _updateScaleFactor(spacing, scale) { - AG_translationStep = spacing; +function _updateScaleFactor(scale) { AG_scaling = scale; - $("#display-spacing").html("" + AG_translationStep + '*' +AG_computedStep.toFixed(2)); $("#display-scale").html("" + AG_scaling); - // redrawCurrentView(); - if (AG_isStopped) { - refreshChart(); - } } //------------------------------------------------END SCALE RELATED CODE-------------------------------------------------------- -//------------------------------------------------START HOVER RELATED CODE-------------------------------------------------------- - -function bindHoverEvent() { - $("#EEGcanvasDiv").bind("plothover", function (event, pos, item) { - /* - * When hovering over plot, if an item (FLOT point) is hovered over, then find the channel of that point - * by means of using the number of AG_translationStep * AG_computedStep intervals from the first channel. - * Then using this and the apporximate of the time value, get the actual data value from the AG_allPoints array. 
- */ - if (item) { - var timeValue = pos.x.toFixed(4); - var dataValue = pos.y.toFixed(4); - var rowIndex = AG_channelColorsDict[item.series.color]; - if (rowIndex == undefined) { - $("#info-channel").html(' None'); - $("#info-time").html(" 0"); - $("#info-value").html(" 0"); - $("#tooltip").remove(); - previousPoint = null; - return; - } - var startTime = AG_time.indexOf(AG_displayedPoints[0][0][0]); - dataValue = AG_allPoints[rowIndex][startTime + (parseInt((timeValue - AG_displayedPoints[0][0][0]) / (AG_displayedPoints[0][1][0] - AG_displayedPoints[0][0][0])))]; - $("#info-channel").html(' ' + chanDisplayLabels[displayedChannels[rowIndex]]); - $("#info-time").html(" " + timeValue); - $("#info-value").html(" " + dataValue); - - if (previousPoint != item.dataIndex || dataValue != undefined) { - previousPoint = item.dataIndex; - - $("#tooltip").remove(); - var x = item.datapoint[0].toFixed(2), - y = item.datapoint[1].toFixed(2); - showTooltip(item.pageX, item.pageY, - "Time: " + timeValue + ", Value: " + dataValue); - } - } else { - $("#info-channel").html(' None'); - $("#info-time").html(" 0"); - $("#info-value").html(" 0"); - $("#tooltip").remove(); - previousPoint = null; - } - }); -} - -function showTooltip(x, y, contents) { - /* - * A tooltip to display information about a specific point on 'mouse over'. - */ - $('
' + contents + '
').css( { - position: 'absolute', - display: 'none', - top: y + 5, - left: x + 5, - border: '1px solid #fdd', - padding: '2px', - 'background-color': '#fee', - opacity: 0.80 - }).appendTo("body").fadeIn(200); -} -//------------------------------------------------END HOVER RELATED CODE-------------------------------------------------------- - //------------------------------------------------START SPEED RELATED CODE-------------------------------------------------------- -/** - * The method should be used when the animated chart is stopped. Draw graph without shifting. - */ -function refreshChart() { - AG_isStopped = false; - // drawGraph(false, noOfShiftedPoints); - AG_isStopped = true; -} function drawSliderForAnimationSpeed() { $("#ctrl-input-speed").slider({ @@ -1218,8 +999,7 @@ function drawSliderForAnimationSpeed() { value: 3, min: -50, max: 50, - change: function(event, ui) { - resetToDefaultView(); + change: function (event, ui) { updateSpeedFactor(); } }); @@ -1228,7 +1008,7 @@ function drawSliderForAnimationSpeed() { function updateSpeedFactor() { var speed = $("#ctrl-input-speed").slider("option", "value"); - $('#display-speed').html(''+ speed); + $('#display-speed').html('' + speed); AG_isSpeedZero = (speed == 0); } diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js new file mode 100644 index 000000000..4de93c68c --- /dev/null +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js @@ -0,0 +1,886 @@ +/** + * TheVirtualBrain-Framework Package. This package holds all Data Management, and + * Web-UI helpful to run brain-simulations. To use it, you also need do download + * TheVirtualBrain-Scientific Package (for simulators). See content of the + * documentation-folder for more details. 
See also http://www.thevirtualbrain.org + * + * (c) 2012-2017, Baycrest Centre for Geriatric Care ("Baycrest") and others + * + * This program is free software: you can redistribute it and/or modify it under the + * terms of the GNU General Public License as published by the Free Software Foundation, + * either version 3 of the License, or (at your option) any later version. + * This program is distributed in the hope that it will be useful, but WITHOUT ANY + * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A + * PARTICULAR PURPOSE. See the GNU General Public License for more details. + * You should have received a copy of the GNU General Public License along with this + * program. If not, see . + * + **/ + +/* + + tv.js should dump just a single public var named tv (T.VB V.isualizations) + + tv = {} + + with + + tv.ndar array fun + tv.plot reusable plotting components + tv.util utility stuff + + */ + +/* global tv, d3 */ + +tv = {}; + +tv.util = { + + // d3 style configurator. if this is slow, interp and eval source + gen_access: function (obj, field) { + return function (maybe) { + if (maybe === undefined) { + return obj["_" + field]; + } else { + obj["_" + field] = maybe; + return obj; + } + }; + }, + + // helper to add usage notes to plots + usage: function (root, heading, notes) { + const p = root.append("p"); + p.classed("slice-info", true); + p.append("h3").classed("instructions", true).text(heading); + p.append("ul").selectAll("li").data(notes) + .enter().append("li").classed("instructions", true).text(function (d) { + return d; + }); + }, + + ord_nums: ["zeroeth", "first", "second", "third", "fourth", "fifth", "sixth", "seventh", "eighth", "ninth", "tenth", + "eleventh", "twelfth", "thirteenth", "fourteenth", "fifteenth", "sixteenth", "seventeenth", "eighteenth", "nineteenth"], + + /* f is a templater/formatter cf. 
https://gist.github.com/984375 */ + fmt: function (f) { // fhe format specifier followed by any number of arguments + + var a = arguments; // store outer arguments + return ("" + f) // force format specifier to String + .replace( // replace tokens in format specifier + /\{(?:(\d+)|(\w+))\}/g, // match {token} references + function (s, // the matched string (ignored) + i, // an argument index + p // a property name + ) { + return p && a[1] // if property name and first argument exist + ? a[1][p] // return property from first argument + : a[i]; // assume argument index and return i-th argument + }); + }, + + get_array_shape: function (baseURL, callback) { + $.getJSON(baseURL + "/read_data_shape/False?kwd=0", callback); + }, + + get_array_slice: function (baseURL, slices, callback, channels, currentMode, currentStateVar) { + var readDataURL = readDataChannelURL(baseURL, slices[0].lo, slices[0].hi, + currentStateVar, currentMode, slices[0].di, JSON.stringify(channels)); + //NOTE: If we need to add slices for the other dimensions pass them as the 'specific_slices' parameter. + // Method called is from time_series.py. + $.getJSON(readDataURL, callback); + } +}; + +tv.ndar = function (data) { + + this.data = data; + + this.imap = function (f) { + for (var i = 0; i < this.data.length; i++) { + this.data[i] = f(this.data[i]); + } + return this; + }; + + this.map = function (f) { + return (new tv.ndar(this.data.slice())).imap(f); + }; + + this.reduce = function (f, init) { + for (var i = 0; i < this.data.length; i++) { + init = f(init, this.data[i]); + } + return init; + }; + + this.max = function () { + return this.reduce(function (l, r) { + return l > r ? l : r; + }, -1e300); + }; + + this.min = function () { + return this.reduce(function (l, r) { + return l < r ? 
l : r; + }, 1e300); + }; + + this.sum = function () { + return this.reduce(function (l, r) { + return l + r; + }, 0); + }; + + this.mean = function () { + return this.sum() / this.length(); + }; + + this.std = function () { + var mean_sqr = this.map(function (x) { + return x * x; + }).mean(), + mean = this.mean(); + return Math.sqrt(mean_sqr - mean * mean); + }; + + this.add = function (b) { + return this.map(function (x) { + return x + b; + }); + }; + + this.sub = function (b) { + return this.add(-b); + }; + + this.mul = function (b) { + return this.map(function (x) { + return x * b; + }); + }; + + this.imul = function (b) { + return this.imap(function (x) { + return x * b; + }); + }; + + this.idiv = function (b) { + return this.imul(1 / b); + }; + + this.div = function (b) { + return this.mul(1 / b); + }; + + this.get = function (i) { + return this.data[i]; + }; + + this.set = function (i, val) { + this.data[i] = val; + }; + + this.nd2lin = function (idx) { + var l = 0; + for (var i = 0; i < idx.length; i++) { + l += this.strides[i] * idx[i]; + } + return l; + }; + + this.length = function () { + return this.data.length; + }; + + // return indices where condition is true + this.where = function (f) { + var indices = []; + for (var i = 0; i < this.data.length; i++) { + if (f(this.data[i], i)) { + indices.push(i); + } + } + return indices; + }; + + this.pretty_step = function (base) { + return Math.pow(base, Math.floor(-1 + Math.log(this.max() - this.min()) / Math.log(base))); + }; + + this.pretty_ticks = function (base) { + var d = this.pretty_step(base || 10), f = Math.floor; + return tv.ndar.range(f(this.min() / d) * d, (f(this.max() / d) + 1) * d, d); + }; + + this.pretty_ticklabels = function (base) { + return this.pretty_ticks(base).map(function (d) { + return d.toPrecision(2); + }); + }; + + this.normalized = function () { + var mn = this.min(), mx = this.max(); + return this.map(function (d) { + return (d - mn) / (mx - mn); + }); + }; + + this.slice = 
function (lo, hi) { + return tv.ndar.from(this.data.slice(lo, hi)); + }; + +}; + +tv.ndar.from = function (src) { + return new tv.ndar(src); +}; + +tv.ndar.ndfrom = function (src) { + var a = tv.ndar.from(src.data); + a.shape = src.shape; + a.strides = src.strides; + return a; +}; + +tv.ndar.range = function (a, b, c) { + var lo, hi, dx; + + if ((a || a === 0) && b) { + if (c) { + dx = c; + } + else { + dx = 1; + } + lo = a; + hi = b; + } else { + hi = a; + lo = 0; + dx = 1; + } + + var end = Math.floor((hi - lo) / dx); + var ar = new tv.ndar([]); + for (var i = 0; i < end; i++) { + ar.data[i] = dx * i + lo; + } + return ar; + +}; + +tv.ndar.zeros = function (n) { + return tv.ndar.range(n).imap(function () { + return 0.0; + }); +}; + +tv.ndar.ones = function (n) { + return tv.ndar.zeros(n).add(1.0); +}; + + +tv.plot = { + + time_series: function () { + + var f = function (root) { + + f.p(f.p() || 0.1); // pad + f.w(f.w() || 700); + f.h(f.h() || 500); + f.point_limit(f.point_limit() || 500); + + f.magic_fcs_amp_scl = 1; + + // make sure we got numbers not strings + f.dt(+f.dt()); + f.t0(+f.t0()); + + // Create the required UI elements. 
+ var svg = root.append("svg").attr("width", f.w()).attr("height", f.h()); + var rgp = svg.append("g").attr("transform", "scale(1, 1)"); + + rgp.append("g").append("rect").attr("width", f.w()).attr("height", f.h()).classed("tv-fig-bg", true); + + f.status_line = svg.append("g").attr("transform", "translate(10, " + (f.h() - 10) + ")").append("text"); + + // parts independent of data + f.compute_layout(); + f.add_resizer(svg, rgp); + f.do_scaffolding(rgp); + + // inversion of flow control in progress + f.we_are_setup = false; + f.render(); + }; // end function f() + + f.render = function () { + f.status_line.text("waiting for data from server..."); + //console.log(f.baseURL(), f.current_slice()) + tv.util.get_array_slice(f.baseURL(), f.current_slice(), f.render_callback, f.channels(), f.mode(), f.state_var()); + }; + + f.render_callback = function (data) { + + var kwd = kwd || {}; + + f.status_line.text("handling data..."); + + /* reformat data into normal ndar style */ + var flat = [] + , sl = f.current_slice()[0] + , shape = [(sl.hi - sl.lo) / sl.di, f.shape()[2]] + , strides = [f.shape()[2], 1]; + + for (var i = 0; i < shape[0]; i++) { + for (var j = 0; j < shape[1]; j++) { + flat.push(data[i][j]); + } + } + + var ts = [], t0 = f.t0(), dt = f.dt(); + + for (var ii = 0; ii < shape[0]; ii++) { + ts.push(t0 + dt * sl.lo + ii * dt * sl.di); + } + + f.ts(tv.ndar.ndfrom({data: ts, shape: [shape[0]], strides: [1]})); + f.ys(tv.ndar.ndfrom({data: flat, shape: shape, strides: strides})); + + f.status_line.text("examining data..."); + f.prepare_data(); + f.status_line.text("rendering data..."); + f.render_focus(); + + if (!f.we_are_setup) { + f.render_contexts(); + f.add_brushes(); + f.br_fcs_endfn(true); // no_render=true + f.we_are_setup = true; + } + + f.status_line.text(""); + }; + + f.current_slice = function () { + var dom = f.sc_fcs_x.domain() + , lo = Math.floor((dom[0] - f.t0()) / f.dt()) + , hi = Math.floor((dom[1] - f.t0()) / f.dt()) + , di = Math.floor((hi - lo) 
/ (2 * f.point_limit())); + + di = di === 0 ? 1 : di; + + if (lo > f.shape()[0]) { + console.log("time_series.current_slice(): found lo>shape[0]: " + lo + ">" + f.shape()[0]); + lo = f.shape()[0]; + } + + return [{lo: lo, hi: hi, di: di}]; + }; + + // dimensions and placement of focus and context areas + f.compute_layout = function () { + // pad is only provisionally basis for dimensioning the context areas; later + // we will need to have inner and outer pad + f.pad = {x: (0 ? f.w() : f.h()) * f.p(), y: f.h() * f.p()}; + f.ul_ctx_y = {x: f.pad.x, y: f.pad.y}; + f.sz_ctx_y = {x: f.pad.x * 0.8, y: f.h() - 3 * f.pad.y - f.pad.y}; + f.ul_ctx_x = {x: f.pad.x, y: 2 * f.pad.y + f.sz_ctx_y.y}; + f.sz_ctx_x = {x: f.w() - 2 * f.pad.x, y: f.pad.y}; + f.ul_fcs = {x: f.ul_ctx_x.x, y: f.ul_ctx_y.y}; + f.sz_fcs = {x: f.sz_ctx_x.x, y: f.sz_ctx_y.y}; + + }; + + // allows user to scale plot size dynamically + // TODO refactor place in tv.util + f.add_resizer = function (svg, rgp) { + + var resize_start; + + rgp.append("g").append("rect").classed("tv-resizer", true) + .on("mouseover", function () { + rgp.attr("style", "cursor: se-resize"); + }) + .on("mouseout", function () { + rgp.attr("style", ""); + }) + .attr("x", f.w() - f.pad.x / 2).attr("y", f.h() - f.pad.y / 2) + .attr("width", f.pad.x / 2).attr("height", f.pad.y / 2) + .call(d3.drag().on("drag", function () { + var p1 = d3.mouse(svg.node()) + , p2 = resize_start + , scl = {x: p1[0] / p2[0], y: p1[1] / p2[1]}; + rgp.attr("transform", "scale(" + scl.x + ", " + scl.y + ")"); + svg.attr("width", scl.x * f.w()).attr("height", scl.y * f.h()); + }).on("start", function () { + resize_start = d3.mouse(rgp.node()); + })); + }; + + // TODO migrate to tv.util + var new_clip_path = function (el, id) { + return el.append("defs").append("clipPath").attr("id", id); + }; + + f.mouse_scroll = function () { + var ev = window.event + , da = ev.detail ? 
ev.detail : ev.wheelDelta + , sh = ev.shiftKey + , dr = !!(da > 0); + + if (sh) { + f.magic_fcs_amp_scl *= dr ? 1.2 : 1 / 1.2; + // TODO scale transform instead via direct access... + f.prepare_data(); + f.render_focus(); + } else { + if (!(f.gp_br_fcs.node().__brush === null)) { + var dx = dr ? 1 : -1; + // stop scrolling if it is the end of the signals' list + if (f.y_dom[0] >= -1 && f.y_dom[1] <= f.channels().length) { + f.y_dom[0] += dx; + f.y_dom[1] += dx; + } + //lower bound + else if (f.y_dom[0] < -1) { + var delta = Math.abs(f.y_dom[0] - (-1)); + f.y_dom[0] += delta; + f.y_dom[1] += delta; + } + //upper bound + else if (f.y_dom[1] > f.channels().length) { + var delta = Math.abs(f.channels().length - f.y_dom[1]); + f.y_dom[0] -= delta; + f.y_dom[1] -= delta; + } + + //redraw the lines + var dom = f.y_dom; + var yscl = f.sz_fcs.y / (dom[1] - dom[0]) / 5; + console.log(dom); + f.sc_fcs_y.domain(dom).range([f.sz_ctx_y.y, 0]); + f.gp_ax_fcs_y.call(f.ax_fcs_y); + f.gp_lines.selectAll("g").attr("transform", function (d, i) { + return "translate(0, " + f.sc_fcs_y(i) + ")" + "scale (1, " + yscl + ")" + }).selectAll("path").attr("stroke-width", "" + (3 / yscl)); + f.scale_focus_stroke(); + + + } + + + } + + + }; + + f.signal_tick_labeler = function (tick_value) { + return (tick_value % 1 === 0) ? 
f.labels()[tick_value] : ""; + }; + + // setup groups, scales and axes for context and focus areas + f.do_scaffolding = function (rgp) { + + // main groups for vertical and horizontal context areas and focus area + f.gp_ctx_x = rgp.append("g").attr("transform", "translate(" + f.ul_ctx_x.x + ", " + f.ul_ctx_x.y + ")"); + f.gp_ctx_x.append("rect").attr("width", f.sz_ctx_x.x).attr("height", f.sz_ctx_x.y).classed("tv-data-bg", true); + + f.gp_fcs = rgp.append("g").attr("transform", "translate(" + f.ul_fcs.x + ", " + f.ul_fcs.y + ")"); + f.gp_fcs.on("mousewheel", f.mouse_scroll); + f.gp_fcs.append("rect").attr("width", f.sz_fcs.x).attr("height", f.sz_fcs.y).classed("tv-data-bg", true); + + + // the plotted time series in the focus and x ctx area are subject to a clipping region + new_clip_path(rgp, "fig-lines-clip").append("rect").attr("width", f.sz_fcs.x).attr("height", f.sz_fcs.y); + // new_clip_path(rgp, "fig-ctx-x-clip").append("rect").attr("width", f.sz_ctx_x.x).attr("height", f.sz_ctx_x.y); + + // group with clip path applied for the focus lines + f.gp_lines = f.gp_fcs.append("g").attr("style", "clip-path: url(#fig-lines-clip)") + .append("g").classed("line-plot", true); + + // scales for vertical and horizontal context, and the x and y axis of the focus area + f.sc_ctx_y = d3.scaleLinear().domain([-1, f.shape()[2]]).range([f.sz_ctx_y.y, 0]); + f.sc_ctx_x = d3.scaleLinear().domain([f.t0(), f.t0() + f.dt() * f.shape()[0]]).range([0, f.sz_ctx_x.x]); + f.sc_fcs_x = d3.scaleLinear().domain([f.t0(), f.t0() + f.dt() * f.shape()[0]]).range([0, f.sz_fcs.x]); + f.sc_fcs_y = d3.scaleLinear().domain([-1, f.shape()[2] + 1]).range([f.sz_fcs.y, 0]); + + + f.dom_x = f.sc_ctx_x.domain(); + f.dom_y = f.sc_ctx_y.domain(); + + // axes for each of the above scales + f.ax_ctx_x = d3.axisBottom(f.sc_ctx_x); + f.ax_fcs_x = d3.axisTop(f.sc_fcs_x); + f.ax_fcs_y = d3.axisLeft(f.sc_fcs_y); + + f.ax_fcs_y.tickFormat(f.signal_tick_labeler); + + // groups for each of the above axes + 
f.gp_ax_ctx_x = f.gp_ctx_x.append("g").classed("axis", true).call(f.ax_ctx_x) + .attr("transform", "translate(0, " + f.sz_ctx_x.y + ")"); + f.gp_ax_fcs_x = f.gp_fcs.append("g").classed("axis", true).call(f.ax_fcs_x); + f.gp_ax_fcs_y = f.gp_fcs.append("g").classed("axis", true).call(f.ax_fcs_y); + + }; + + f.prepare_data = function () { + + var ts = f.ts(); + var ys = f.ys(); + var da_lines = []; + var line_avg; + var ys_std = ys.min(); + //To set this properly, we need to know: + // nsig - how many signals on the screen? + // std - std of signals + // pxav - vertical pixels available + + for (var sig_idx = 0; sig_idx < ys.shape[1]; sig_idx++) { + + da_lines[sig_idx] = []; + for (var t_idx = 0; t_idx < ys.shape[0]; t_idx++) { + da_lines[sig_idx][t_idx] = ys.data[ys.strides[0] * t_idx + sig_idx]; + } + + line_avg = d3.mean(da_lines[sig_idx]); + for (var tt_idx = 0; tt_idx < ys.shape[0]; tt_idx++) { + da_lines[sig_idx][tt_idx] = f.magic_fcs_amp_scl * (da_lines[sig_idx][tt_idx] - line_avg) / Math.abs(ys_std); + // multiply by -1 because the y axis points down + da_lines[sig_idx][tt_idx] *= -1; + } + + + da_lines[sig_idx] = {sig: da_lines[sig_idx], id: sig_idx}; + } + + // compute context data + var da_x = [] + , da_xs = [] + , da_y = [] + , ys_mean = ys.mean() + , ys_std = ys.std() + , n_chan = ys.shape[1] + , datum; + + // center an average signal + for (var j = 0; j < ts.shape[0]; j++) { + da_x[j] = 0; + da_xs[j] = 0; + for (var i = 0; i < n_chan; i++) { + datum = ys.data[j * n_chan + i]; + da_x [j] += datum; + da_xs[j] += datum * datum; + } + da_xs[j] = Math.sqrt(da_xs[j] / n_chan - ((da_x[j] / n_chan) * (da_x[j] / n_chan))); + da_x [j] = (da_x[j] / n_chan - ys_mean); + // multiply by -1 because y axis points down + da_x[j] *= -1; + + if ((isNaN(da_x[j])) || (isNaN(da_xs[j]))) { + console.log("encountered NaN in data: da_x[" + j + "] = " + da_x[j] + ", da_xs[" + j + "] = " + da_xs[j] + "."); + } + } + + // scale average signal by ptp + var _dar = new tv.ndar(da_x); 
+ var da_max = _dar.max() + , da_min = _dar.min() + , da_ptp = da_max - da_min; + + for (var si = 0; si < da_x.length; si++) { + da_x[si] = da_x[si] / da_ptp; + } + + // center and scale the std line + da_xs.min = tv.ndar.from(da_xs).min(); + for (var jj = 0; jj < da_xs.length; jj++) { + da_xs[jj] -= da_xs.min; + da_xs[jj] /= ys_std; + // multiply by -1 because y axis points down + da_xs[jj] *= -1; + } + + // center and scale to std each signal + for (var jjj = 0; jjj < n_chan; jjj++) { + da_y[jjj] = []; + // This computes a slice at the beginning of the signal to be displayed on the y axis + // The signal might be shorter than the width hence the min + for (var ii = 0; ii < Math.min(f.sz_ctx_y.x, ys.shape[0]); ii++) { + da_y[jjj][ii] = (ys.data[ii * n_chan + jjj] - ys_mean) / ys_std; + // multiply by -1 because y axis points down + da_y[jjj][ii] *= -1; + } + } + + f.da_lines = da_lines; + f.da_x_dt = f.dt() * f.current_slice()[0].di; + f.da_x = da_x; + f.da_xs = [0, da_xs[da_xs.length - 1]].concat(da_xs, [0]); // filled area needs start == end + f.da_y = da_y; + }; + + f.render_focus = function () { + + var ts = f.ts() + , g = f.gp_lines.selectAll("g").data(f.da_lines, function (d) { + return d.id; + }); + + + if (!f.we_are_setup) { + + + f.line_paths = g.enter() + .append("g") + .attr("transform", function (d, i) { + return "translate(0, " + f.sc_fcs_y(i) + ")"; + }) + .append("path") + .attr("vector-effect", "non-scaling-stroke"); + } + + + f.line_paths.attr("d", function (d) { + return d3.line() + .x(function (d, i) { + return f.sc_ctx_x(ts.data[i]); + }) + .y(function (d) { + return d; + }) + (d.sig); + }); + + + }; + + f.render_contexts = function () { + + // originally used to draw context lines and average + + + }; + + f.scale_focus_stroke = function () { + var total = f.sz_fcs + , xdom = f.sc_fcs_x.domain() + , ydom = f.sc_fcs_y.domain() + , dx = xdom[1] - xdom[0] + , dy = ydom[1] - ydom[0] + , area = dx * dy + , area2 = total.x * total.y; + + 
//console.log(area / area2); + if (window.navigator.userAgent.indexOf("Edge") > -1) { + f.gp_lines.selectAll("g").selectAll("path").attr("stroke-width", "0.3px");//4*Math.sqrt(Math.abs(area / area2))) + } else { + f.gp_lines.selectAll("g").selectAll("path").attr("stroke-width", "1px");//4*Math.sqrt(Math.abs(area / area2))) + } + }; + + f.add_brushes = function () { + + // horizontal context brush + var br_ctx_x_fn = function () { + + var event_selection_x = []; + // Different extent when it is: + //1.from the brush of 2D Focus Brush + if (d3.event.selection != null && d3.event.selection[0][0] != null) { + event_selection_x[0] = d3.event.selection[0][0]; + event_selection_x[1] = d3.event.selection[1][0]; + } + //2.from the end of focus brush + else if (d3.event.selection == null) { + event_selection_x = [f.sc_ctx_x.range()[0], f.sc_ctx_x.range()[1]]; + f.dom_x = [f.t0(), f.t0() + f.dt() * f.shape()[0]]; + } + //3.from itself + else { + event_selection_x = d3.event.selection; + } + + + var scale_brushed = d3.scaleLinear().domain(f.dom_x).range(f.sc_ctx_x.range()); + + + //selection is now in coordinates and we have to map it using scales + event_selection_x = event_selection_x.map(scale_brushed.invert, scale_brushed); + + + dom = f.br_ctx_x === null ? f.sc_ctx_x.domain() : event_selection_x; + + f.dom_x = dom; + + sc = f.sc_fcs_x; + x_scaling = scale_brushed.domain()[1] / (dom[1] - dom[0]); + sc.domain(dom); + f.sc_ctx_x.domain(dom); + f.gp_ax_fcs_x.call(f.ax_fcs_x); + f.gp_ax_ctx_x.call(f.ax_ctx_x); + + + // TODO: This seems to cause problems with negative values and commenting it out does not seem to + // cause any additional problems. This could do with some double checking. 
+ f.gp_lines.attr("transform", "translate(" + sc(0) + ", 0) scale(" + x_scaling + ", 1)"); + } + + // vertical changes + , br_ctx_y_fn = function () { + + var event_selection_y = []; + + if (d3.event == null || d3.event.selection == null) { + event_selection_y = f.sc_ctx_y.range(); + f.dom_y = [-1, f.shape()[2]]; + } + else if (d3.event.selection != null && d3.event.selection[0][0] != null) { + event_selection_y[1] = d3.event.selection[0][1]; + event_selection_y[0] = d3.event.selection[1][1]; + } + else { + event_selection_y[0] = d3.event.selection[1]; + event_selection_y[1] = d3.event.selection[0]; + } + + var scale_brushed = d3.scaleLinear().domain(f.dom_y).range(f.sc_ctx_y.range()); + + + event_selection_y = event_selection_y.map(scale_brushed.invert, scale_brushed); + var dom = f.br_ctx_y === null ? f.sc_ctx_y.domain() : event_selection_y; + f.dom_y = dom; + var yscl = f.sz_fcs.y / (dom[1] - dom[0]) / 5; + f.sc_fcs_y.domain(dom).range([f.sz_ctx_y.y, 0]); + f.gp_ax_fcs_y.call(f.ax_fcs_y); + f.gp_lines.selectAll("g").attr("transform", function (d, i) { + return "translate(0, " + f.sc_fcs_y(i) + ")" + "scale (1, " + yscl + ")" + }).selectAll("path").attr("stroke-width", "" + (3 / yscl)); + + }; + + f.br_ctx_y_fn = br_ctx_y_fn; + br_ctx_end = function () { + + //get the selected time range + var event_selection_x = []; + if (d3.event.selection != null) { + event_selection_x[0] = d3.event.selection[0]; + event_selection_x[1] = d3.event.selection[1]; + } + var scale_brushed = d3.scaleLinear().domain(f.dom_x).range(f.sc_ctx_x.range()); + event_selection_x = event_selection_x.map(scale_brushed.invert, scale_brushed); + dom = f.br_ctx_x === null ? 
f.sc_ctx_x.domain() : event_selection_x; + + // remove the last time's selection + f.gp_ctx_x.select(".selected-time").remove(); + + + //change the actual time point in the slider + if (d3.event.selection != null) { + //display the selected time range + f.text = f.gp_ctx_x.append("text").attr("class", "selected-time") + .text("Selected Time Range " + event_selection_x[0] + " " + event_selection_x[1]) + + //update the time in the input tag + d3.select("#TimeNow").property('value', event_selection_x[0].toFixed(2)); + + //update the time in the 3d viewer's time + $('#slider').slider('value', event_selection_x[0].toFixed(2)); + loadFromTimeStep(parseInt(event_selection_x[0])); + + //need a function to test the slider value + // scyncing betweet the 2D tiem selection and 3D movie + if ($('#slider').slider("option", "value") == parseInt(event_selection_x[1])) { + console.log('stop'); + } + } + + + }; + + // on end of focus brush + // this is on f so that f can call it when everything else is done.. 
+ f.br_fcs_endfn = function (no_render) { + if (!d3.event || !d3.event.sourceEvent) { + br_ctx_y_fn(); + f.scale_focus_stroke(); + return; + } + br_ctx_x_fn(); + br_ctx_y_fn(); + f.gp_br_fcs.node().__brush.selection = null; + f.gp_br_fcs.call(f.br_fcs); + f.scale_focus_stroke(); + + + }; + + + f.br_fcs_startfn = function () { + // we will use the left upper of the brush to do a tooltip + + //select a channel + var event_selection_y = []; + event_selection_y[1] = d3.event.selection[0][1]; + event_selection_y = event_selection_y.map(f.sc_ctx_y.invert) + + //choose the time point + var event_selection_x = []; + event_selection_x[1] = d3.event.selection[0][0]; + event_selection_x = event_selection_x.map(f.sc_ctx_x.invert) + if (event_selection_x[1] < 0) { + event_selection_x[1] = 0 + f.da_x; + } + + + timerange = f.sc_fcs_x.domain()[1]; + channelID = parseInt(event_selection_y[1]); + timepoint_length = f.da_lines[channelID].sig.length + + timepoint = event_selection_x[1] / f.sc_fcs_x.domain()[1]; + timepoint = timepoint * timepoint_length; + timepoint = parseInt(timepoint); + + valuearray = f.ys().data; + channel_number = f.channels().length + channel_index = f.channels().indexOf(channelID); + + //print out the channel name(label) and value + $("#info-channel").html(' ' + f.labels()[parseInt(event_selection_y[1])]); + $("#info-time").html(" " + timepoint); + $("#info-value").html(" " + valuearray[channel_number * timepoint + channel_index]); + + } + + + // create brushes + f.br_ctx_x = d3.brushX().extent([[f.sc_ctx_x.range()[0], 0], [f.sc_ctx_x.range()[1], f.sz_ctx_x.y]]).on("end", br_ctx_end); + f.br_fcs = d3.brush().extent([[f.sc_fcs_x.range()[0], 0], [f.sc_fcs_x.range()[1], f.sz_fcs.y]]) + .on("end", f.br_fcs_endfn).on("start", f.br_fcs_startfn) + .on("brush", f.br_fcs_brush); + + // add brush groups and add brushes to them + f.gp_br_ctx_x = f.gp_ctx_x.append("g"); + + f.gp_br_fcs = f.gp_fcs.append("g").classed("brush", true).call(f.br_fcs); + 
f.gp_br_ctx_x.append("g").classed("brush", true).call(f.br_ctx_x).selectAll("rect").attr("height", f.sz_ctx_x.y); + + + }; + + f.parameters = ["w", "h", "p", "baseURL", "preview", "labels", "shape", + "t0", "dt", "ts", "ys", "point_limit", "channels", "mode", "state_var"]; + f.parameters.map(function (name) { + f[name] = tv.util.gen_access(f, name); + }); + + return f; + }, + + +}; diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/view.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/view.html index 995d184f5..8aab2cc15 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/view.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/view.html @@ -17,16 +17,17 @@ + \ No newline at end of file From 8c5bc0a0922f95858b5e6fde3436cf16db53600e Mon Sep 17 00:00:00 2001 From: kimonoki Date: Fri, 25 May 2018 18:14:32 +1000 Subject: [PATCH 09/53] TVB-2368 Fix the scrolling f.y_dom should be f.dom_y --- .../new_dual_brain/scripts/timeseriesD3.js | 23 ++++++++++--------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js index 4de93c68c..db55fdaba 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js @@ -442,27 +442,26 @@ tv.plot = { if (!(f.gp_br_fcs.node().__brush === null)) { var dx = dr ? 
1 : -1; // stop scrolling if it is the end of the signals' list - if (f.y_dom[0] >= -1 && f.y_dom[1] <= f.channels().length) { - f.y_dom[0] += dx; - f.y_dom[1] += dx; + if (f.dom_y[0] >= -1 && f.dom_y[1] <= f.channels().length) { + f.dom_y[0] += dx; + f.dom_y[1] += dx; } //lower bound - else if (f.y_dom[0] < -1) { + else if (f.dom_y[0] < -1) { var delta = Math.abs(f.y_dom[0] - (-1)); - f.y_dom[0] += delta; - f.y_dom[1] += delta; + f.dom_y[0] += delta; + f.dom_y[1] += delta; } //upper bound - else if (f.y_dom[1] > f.channels().length) { + else if (f.dom_y[1] > f.channels().length) { var delta = Math.abs(f.channels().length - f.y_dom[1]); - f.y_dom[0] -= delta; - f.y_dom[1] -= delta; + f.dom_y[0] -= delta; + f.dom_y[1] -= delta; } //redraw the lines - var dom = f.y_dom; + var dom = f.dom_y; var yscl = f.sz_fcs.y / (dom[1] - dom[0]) / 5; - console.log(dom); f.sc_fcs_y.domain(dom).range([f.sz_ctx_y.y, 0]); f.gp_ax_fcs_y.call(f.ax_fcs_y); f.gp_lines.selectAll("g").attr("transform", function (d, i) { @@ -683,6 +682,8 @@ tv.plot = { } }; + + f.add_brushes = function () { // horizontal context brush From ea374321b1fa364627f977762dc4bb216c225b60 Mon Sep 17 00:00:00 2001 From: kimonoki Date: Sun, 27 May 2018 16:31:08 +1000 Subject: [PATCH 10/53] Remove unused dependency d3tip --- .../genshi/visualizers/new_dual_brain/dual_brain_2d_view.html | 3 --- 1 file changed, 3 deletions(-) diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html index 13852d1b2..0dce8585e 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html @@ -1,8 +1,6 @@
- - @@ -11,7 +9,6 @@ - From ff05b7d01287553438ccf2886cf3ed33b04e5f4d Mon Sep 17 00:00:00 2001 From: kimonoki Date: Mon, 28 May 2018 11:59:32 +1000 Subject: [PATCH 11/53] TVB-2368 Fix the scrolling two more f.dom_y fixes add interval value --- .../visualizers/new_dual_brain/scripts/timeseriesD3.js | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js index db55fdaba..cf25b54b3 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js @@ -448,13 +448,13 @@ tv.plot = { } //lower bound else if (f.dom_y[0] < -1) { - var delta = Math.abs(f.y_dom[0] - (-1)); + var delta = Math.abs(f.dom_y[0] - (-1)); f.dom_y[0] += delta; f.dom_y[1] += delta; } //upper bound else if (f.dom_y[1] > f.channels().length) { - var delta = Math.abs(f.channels().length - f.y_dom[1]); + var delta = Math.abs(f.channels().length - f.dom_y[1]); f.dom_y[0] -= delta; f.dom_y[1] -= delta; } @@ -683,7 +683,6 @@ tv.plot = { }; - f.add_brushes = function () { // horizontal context brush @@ -765,6 +764,7 @@ tv.plot = { }; f.br_ctx_y_fn = br_ctx_y_fn; + br_ctx_end = function () { //get the selected time range @@ -785,7 +785,9 @@ tv.plot = { if (d3.event.selection != null) { //display the selected time range f.text = f.gp_ctx_x.append("text").attr("class", "selected-time") - .text("Selected Time Range " + event_selection_x[0] + " " + event_selection_x[1]) + .text("Selected Time Range: " + event_selection_x[0].toFixed(2) + "ms" + " to " + event_selection_x[1].toFixed(2) + "ms" + + " Interval:" + (event_selection_x[1] - event_selection_x[0]).toFixed(2)); + //update the time in the input tag d3.select("#TimeNow").property('value', event_selection_x[0].toFixed(2)); From 
aa5d4f0e0220de3cd1089f265d771ebdbaac447f Mon Sep 17 00:00:00 2001 From: kimonoki Date: Wed, 6 Jun 2018 13:43:36 +1000 Subject: [PATCH 12/53] TVB-2370 Display time selection range Add increase/decrease button --- .../new_dual_brain/dual_brain_2d_view.html | 10 +++ .../new_dual_brain/scripts/dualBrainViewer.js | 15 +++++ .../new_dual_brain/scripts/timeseriesD3.js | 63 +++++++++++-------- 3 files changed, 62 insertions(+), 26 deletions(-) diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html index 0dce8585e..33136a157 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html @@ -39,6 +39,7 @@ 0 + @@ -53,10 +54,19 @@ buttonTitle="Select signals from Input %d" % (idx+1))} + +
+ +
+
+
+ + +
\ No newline at end of file diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js index ade8eeffe..29bc8c17f 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js @@ -470,8 +470,23 @@ function submitSelectedChannels(isEndOfData) { } +//timeseries viewer var ts = null; +//time selection functions +function intervalIncrease() { + console.log(timeselection_interval); + timeselection_interval+=1; + $("#time-selection-interval").html(timeselection_interval) +} + +function intervalDecrease(){ + timeselection_interval-=1; + $("#time-selection-interval").html(timeselection_interval) +} + + + function resizeToFillParent(ts) { var container, width, height; diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js index cf25b54b3..2a009a61e 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js @@ -776,31 +776,14 @@ tv.plot = { var scale_brushed = d3.scaleLinear().domain(f.dom_x).range(f.sc_ctx_x.range()); event_selection_x = event_selection_x.map(scale_brushed.invert, scale_brushed); dom = f.br_ctx_x === null ? 
f.sc_ctx_x.domain() : event_selection_x; - + timeselection = event_selection_x; // remove the last time's selection - f.gp_ctx_x.select(".selected-time").remove(); + f.gp_ctx_x.selectAll(".selected-time").remove(); //change the actual time point in the slider if (d3.event.selection != null) { - //display the selected time range - f.text = f.gp_ctx_x.append("text").attr("class", "selected-time") - .text("Selected Time Range: " + event_selection_x[0].toFixed(2) + "ms" + " to " + event_selection_x[1].toFixed(2) + "ms" + - " Interval:" + (event_selection_x[1] - event_selection_x[0]).toFixed(2)); - - - //update the time in the input tag - d3.select("#TimeNow").property('value', event_selection_x[0].toFixed(2)); - - //update the time in the 3d viewer's time - $('#slider').slider('value', event_selection_x[0].toFixed(2)); - loadFromTimeStep(parseInt(event_selection_x[0])); - - //need a function to test the slider value - // scyncing betweet the 2D tiem selection and 3D movie - if ($('#slider').slider("option", "value") == parseInt(event_selection_x[1])) { - console.log('stop'); - } + f.timeselection_update_fn() } @@ -830,12 +813,12 @@ tv.plot = { //select a channel var event_selection_y = []; event_selection_y[1] = d3.event.selection[0][1]; - event_selection_y = event_selection_y.map(f.sc_ctx_y.invert) + event_selection_y = event_selection_y.map(f.sc_ctx_y.invert); //choose the time point var event_selection_x = []; event_selection_x[1] = d3.event.selection[0][0]; - event_selection_x = event_selection_x.map(f.sc_ctx_x.invert) + event_selection_x = event_selection_x.map(f.sc_ctx_x.invert); if (event_selection_x[1] < 0) { event_selection_x[1] = 0 + f.da_x; } @@ -843,14 +826,14 @@ tv.plot = { timerange = f.sc_fcs_x.domain()[1]; channelID = parseInt(event_selection_y[1]); - timepoint_length = f.da_lines[channelID].sig.length + timepoint_length = f.da_lines[channelID].sig.length; timepoint = event_selection_x[1] / f.sc_fcs_x.domain()[1]; timepoint = timepoint * 
timepoint_length; timepoint = parseInt(timepoint); valuearray = f.ys().data; - channel_number = f.channels().length + channel_number = f.channels().length; channel_index = f.channels().indexOf(channelID); //print out the channel name(label) and value @@ -867,15 +850,43 @@ tv.plot = { .on("end", f.br_fcs_endfn).on("start", f.br_fcs_startfn) .on("brush", f.br_fcs_brush); - // add brush groups and add brushes to them + // add time selection brush group f.gp_br_ctx_x = f.gp_ctx_x.append("g"); + //add title for the time selection area + f.timeselection_title = f.gp_br_ctx_x.append("text").text("Time Selection").attr("y", -10) + f.gp_br_ctx_x.append("g").classed("brush", true).call(f.br_ctx_x).selectAll("rect").attr("height", f.sz_ctx_x.y); + + //add main focus brush group f.gp_br_fcs = f.gp_fcs.append("g").classed("brush", true).call(f.br_fcs); - f.gp_br_ctx_x.append("g").classed("brush", true).call(f.br_ctx_x).selectAll("rect").attr("height", f.sz_ctx_x.y); }; + + //functions for the time selection window + f.timeselection_update_fn = function () { + //display the selected time range + f.text = f.gp_ctx_x.append("text").attr("class","selected-time").attr("id","time-selection") + .text("Selected Time Range: " + timeselection[0].toFixed(2) + "ms" + " to " + timeselection[1].toFixed(2) + "ms"); + + f.text_interval=f.gp_ctx_x.append("text").attr("class","selected-time").attr("id","time-selection-interval").text(" Interval:" + (timeselection[1] - timeselection[0]).toFixed(2)).attr("x", 100).attr("y", -10); + + timeselection_interval=timeselection[1] - timeselection[0]; + + //update the time in the input tag + d3.select("#TimeNow").property('value', timeselection[0].toFixed(2)); + + //update the time in the 3d viewer's time + $('#slider').slider('value', timeselection[0].toFixed(2)); + loadFromTimeStep(parseInt(timeselection[0])); + }; + + f.interval_increase=function(){ + timeselection[1]=timeselection[1]+1; + f.timeselection_update_fn() + }; + f.parameters = ["w", "h", 
"p", "baseURL", "preview", "labels", "shape", "t0", "dt", "ts", "ys", "point_limit", "channels", "mode", "state_var"]; f.parameters.map(function (name) { From 684ecbb3b00901ddfe0d83ee611e0c0293cf6969 Mon Sep 17 00:00:00 2001 From: kimonoki Date: Wed, 6 Jun 2018 15:37:37 +1000 Subject: [PATCH 13/53] TVB-2369 Display spheres in the dual viewer Transparency for the region activity viewer Highlight selected spheres and display the message of the channel name --- tvb/adapters/visualizers/new_dual_viewer.py | 6 + .../visualizers/commons/shading/shading.js | 2 + .../shading/vertex_region_opacity.glsl | 26 + .../visualizers/new_dual_brain/controls.html | 14 + .../dual_brain_3d_internal_view.html | 3 +- .../new_dual_brain/dual_brain_3d_view.html | 6 +- .../dual_brain_toggle.controls.html | 59 + .../new_dual_brain/gl_dual_view_header.html | 23 + .../scripts/timeseries3DScript.js | 1403 +++++++++++++++++ .../new_dual_brain/scripts/timeseriesD3.js | 4 + .../new_dual_brain/vertex_shader_opacity.html | 4 + 11 files changed, 1545 insertions(+), 5 deletions(-) create mode 100644 tvb/interfaces/web/templates/genshi/visualizers/commons/shading/vertex_region_opacity.glsl create mode 100644 tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_toggle.controls.html create mode 100644 tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/gl_dual_view_header.html create mode 100644 tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseries3DScript.js create mode 100644 tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/vertex_shader_opacity.html diff --git a/tvb/adapters/visualizers/new_dual_viewer.py b/tvb/adapters/visualizers/new_dual_viewer.py index 8fd6a9134..4f5ea9595 100644 --- a/tvb/adapters/visualizers/new_dual_viewer.py +++ b/tvb/adapters/visualizers/new_dual_viewer.py @@ -116,6 +116,12 @@ def launch(self, time_series, projection_surface=None, shell_surface=None): params['urlVertices'] = None params['isSEEG'] = True 
+ if isinstance(time_series, TimeSeriesRegion): + params['withTransparency'] = True + else: + params['withTransparency'] = False + + return self.build_display_result("new_dual_brain/view", params, pages=dict(controlPage="new_dual_brain/controls", channelsPage="commons/channel_selector.html")) diff --git a/tvb/interfaces/web/templates/genshi/visualizers/commons/shading/shading.js b/tvb/interfaces/web/templates/genshi/visualizers/commons/shading/shading.js index 5b48f5eb7..dce9506b6 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/commons/shading/shading.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/commons/shading/shading.js @@ -145,6 +145,8 @@ SHADING_Context.region_progam_init = function(shader, measure_point_nr, legendGr for (let i = 0; i <= measure_point_nr + 1 + legendGranularity; i++) { shader.activityUniform[i] = gl.getUniformLocation(shader, "uActivity[" + i + "]"); } + //for alpha channel in the new dual brain viewer + shader.alphaUniform = gl.getUniformLocation(shader, "uAlpha"); }; /** Init the program that uses both a vertex activity and a vertex color */ diff --git a/tvb/interfaces/web/templates/genshi/visualizers/commons/shading/vertex_region_opacity.glsl b/tvb/interfaces/web/templates/genshi/visualizers/commons/shading/vertex_region_opacity.glsl new file mode 100644 index 000000000..f86af1d22 --- /dev/null +++ b/tvb/interfaces/web/templates/genshi/visualizers/commons/shading/vertex_region_opacity.glsl @@ -0,0 +1,26 @@ +{% include transform.glsl %} +{% include colorscheme.glsl %} +/** + * This shader displays region level activity. The activity is stored in the uniform array. + * aVertexRegion is the mapping from vertices to region indices. 
+ */ +attribute vec3 aVertexPosition; +attribute vec3 aVertexNormal; + +attribute float aVertexRegion; +// 127 is the legend granularity +uniform vec2 uActivity[${abs(noOfMeasurePoints) + 2} + 127]; +//for surface transparency +uniform float uAlpha; + +varying vec4 vColor; +varying vec3 posInterp; +varying vec3 normInterp; + +void main(void) { + transformed_pos(aVertexPosition, aVertexNormal, gl_Position, posInterp, normInterp); + + vec2 uv = uActivity[int(aVertexRegion)]; + vColor = colorSchemeLookup(uv); + vColor.a=uAlpha; +} diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html index 494948790..214a77834 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html @@ -21,6 +21,20 @@ + + +
+ +
+
+ +
+ + + diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_internal_view.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_internal_view.html index 978302e7b..6d09e2c61 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_internal_view.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_internal_view.html @@ -2,7 +2,7 @@ xmlns:py="http://genshi.edgewall.org/" xmlns:xi="http://www.w3.org/2001/XInclude"> - + @@ -19,7 +19,6 @@ }); -
test
\ No newline at end of file diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_view.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_view.html index a216f2cf7..b7915559e 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_view.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_view.html @@ -1,9 +1,9 @@
- + - + diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_toggle.controls.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_toggle.controls.html new file mode 100644 index 000000000..504c0dece --- /dev/null +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_toggle.controls.html @@ -0,0 +1,59 @@ + +
+ + +
View hemispheres
+
+ + +
+ + +
+ + +
+
+ +
Lighting
+
+ +
+ +
Show
+
+ +
+ +
+
+ +
+ + +
+
+ +
+
+ +
Toggle
+
+ +
+ +
+
+ +
+
\ No newline at end of file diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/gl_dual_view_header.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/gl_dual_view_header.html new file mode 100644 index 000000000..b007a047b --- /dev/null +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/gl_dual_view_header.html @@ -0,0 +1,23 @@ + +
+ + + + + + + + + + + + + + + + + + + +
\ No newline at end of file diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseries3DScript.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseries3DScript.js new file mode 100644 index 000000000..0207676fb --- /dev/null +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseries3DScript.js @@ -0,0 +1,1403 @@ +/** + * TheVirtualBrain-Framework Package. This package holds all Data Management, and + * Web-UI helpful to run brain-simulations. To use it, you also need do download + * TheVirtualBrain-Scientific Package (for simulators). See content of the + * documentation-folder for more details. See also http://www.thevirtualbrain.org + * + * (c) 2012-2017, Baycrest Centre for Geriatric Care ("Baycrest") and others and others + * + * This program is free software: you can redistribute it and/or modify it under the + * terms of the GNU General Public License as published by the Free Software Foundation, + * either version 3 of the License, or (at your option) any later version. + * This program is distributed in the hope that it will be useful, but WITHOUT ANY + * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A + * PARTICULAR PURPOSE. See the GNU General Public License for more details. + * You should have received a copy of the GNU General Public License along with this + * program. If not, see . + * + **/ + +/* globals gl, GL_shaderProgram, SHADING_Context */ + +/** + * WebGL methods "inheriting" from webGL_xx.js in static/js. + */ + + +var _alphaValue = 0.1; + +/** + * Change transparency of cortical surface from user-input. 
+ * + * @param inputField user given input value for transparency of cortical-surface + */ +function changeSurfaceTransparency(inputField) { + var newValue = inputField.value; + + if (!isNaN(parseFloat(newValue)) && isFinite(newValue) && parseFloat(newValue) >= 0 && parseFloat(newValue) <= 1) { + _alphaValue = parseFloat(newValue); + } else { + inputField.value = _alphaValue; + displayMessage("Transparency value should be a number between 0 and 1.", "warningMessage"); + } +} + +// below is the modified code from virtualBrain.js + +/* The comment below lists the global functions used in this file. + * It is here to make jshint happy and to document these implicit global dependencies. + * In the future we might group these into namespace objects. + * ( Global state is not in this list except gl and namespaces; let them be warnings ) + */ + +/* globals gl, SHADING_Context, GL_shaderProgram, displayMessage, HLPR_readJSONfromFile, readDataPageURL, + GL_handleKeyDown, GL_handleKeyUp, GL_handleMouseMove, GL_handleMouseWeel, + initGL, updateGLCanvasSize, LEG_updateLegendVerticesBuffers, + basicInitShaders, basicInitSurfaceLighting, GL_initColorPickFrameBuffer, + ColSchGetTheme, LEG_generateLegendBuffers, LEG_initMinMax + */ + +/** + * WebGL methods "inheriting" from webGL_xx.js in static/js. + */ +var BRAIN_CANVAS_ID = "GLcanvas"; +/** + * Variables for displaying Time and computing Frames/Sec + */ +var lastTime = 0; +var framestime = [50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, + 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50]; + +/** + * Time like entities: + * The movie time + * Measured in 'time steps' + * An index in the activitiesData array + * The display time + * Measured in 'ticks' + * Updated every TICK_STEP ms. + * We do not keep the value of this time. + * The displayed movie time + * The int value of it is in currentTimeValue. + * Measured in 'time steps'. 
+ * Synchronizes the movie time to the display time. + */ + +/** + * Granularity of the display time in ms. + */ +var TICK_STEP = 33; // 30Hz +/** + * How many movie time steps for a display tick. + * If this is < 1 a movie frame will last 1/timeStepsPerTick ticks + */ +var timeStepsPerTick = 1; +/** + * The integer part of timeStepsPerTick + */ +var TIME_STEP = 1; +/** + * The current time in the activity movie. + * An index of the current movie frame. + * When timeStepsPerTick > it increments by TIME_STEP every tick. + * When timeStepsPerTick < 1 it increments by 1 every 1/timeStepsPerTick tick. + */ +var currentTimeValue = 0; +/** + * The maximum possible value of currentTimeValue + */ +var MAX_TIME = 0; +/** + * For how many display ticks have we drawn the same time step. + */ +var elapsedTicksPerTimeStep = 0; +/** + * At the maximum speed the time line finishes in 32 steps + * This is approximately 1s wall time (ignoring data fetches). + */ +var ACTIVITY_FRAMES_IN_TIME_LINE_AT_MAX_SPEED = 32; + +var sliderSel = false; + +var isPreview = false; +/** + * This buffer arrays will contain: + * arr[i][0] Vertices buffer + * arr[i][1] Normals buffer + * arr[i][2] Triangles indices buffer + * arr[i][3] Color buffer (same length as vertices /3 * 4) in case of one-to-one mapping + * arr[i][3] Region indexes, when not one-to-one mapping + */ +var brainBuffers = []; +var brainLinesBuffers = []; +var shelfBuffers = []; +var measurePointsBuffers = []; + +var regionBoundariesController = null; + +var activitiesData = [], timeData = [], measurePoints = [], measurePointsLabels = []; + +var pageSize = 0; +var urlBase = ''; +var selectedMode = 0; +var selectedStateVar = 0; +var currentActivitiesFileLength = 0; +var nextActivitiesFileData = []; +var totalPassedActivitiesData = 0; +var shouldIncrementTime = true; +var currentAsyncCall = null; + +var NO_OF_MEASURE_POINTS = 0; +var NEXT_PAGE_THREASHOLD = 100; + +var activityMin = 0, activityMax = 0; +var isOneToOneMapping = false; 
+var isDoubleView = false; +var isEEGView = false; +var withTransparency = false; +var drawingMode; +var VS_showLegend = true; +var isInternalSensorView = false; + +//display spheres by default +var displayMeasureNodes = true; +var isFaceToDisplay = false; + +var drawNavigator = false; +var drawTriangleLines = false; +var drawSpeculars = false; +/** + * Used to determine which buffer chunks belong to a hemisphere. + * The chunks are used to limit geometry size for a draw call. + */ +var VS_hemisphere_chunk_mask = null; +var bufferSetsMask = null; +var VS_hemisphereVisibility = null; +/** + * What regions are selected to be shown. + * Unselected regions are greyed out. + * This is used only by the brain activity movie for region level activity. + * For static viewers it is initialized to a full selection + */ +var VS_selectedRegions = []; +/** + * camera settings + */ +var near = 0.1; + +// index of the currently selected node. This is equivalent to CONN_pickedIndex +var VS_pickedIndex = -1; + +var VB_BrainNavigator; + +//indicating we are drawing the spheres and applying material colors +var isDrawingSpheres = false; + + +function VS_init_hemisphere_mask(hemisphere_chunk_mask) { + VS_hemisphere_chunk_mask = hemisphere_chunk_mask; + if (hemisphere_chunk_mask !== null && hemisphere_chunk_mask !== undefined) { + bufferSetsMask = []; + for (let i = 0; i < VS_hemisphere_chunk_mask.length; i++) { + bufferSetsMask[i] = 1; + } + } +} + +function VS_SetHemisphere(h) { + VS_hemisphereVisibility = h; + for (let i = 0; i < VS_hemisphere_chunk_mask.length; i++) { + if (h === null || h === undefined) { + bufferSetsMask[i] = 1; + } else if (h === 'l') { + bufferSetsMask[i] = 1 - VS_hemisphere_chunk_mask[i]; + } else if (h === 'r') { + bufferSetsMask[i] = VS_hemisphere_chunk_mask[i]; + } + } +} + +function VS_StartPortletPreview(baseDatatypeURL, urlVerticesList, urlTrianglesList, urlNormalsList, noOfMeasurePoints, + urlRegionMapList, boundaryURL, minActivity, maxActivity, 
oneToOneMapping) { + isPreview = true; + pageSize = 1; + urlBase = baseDatatypeURL; + activitiesData = HLPR_readJSONfromFile(readDataSplitPageURL(urlBase, 0, 1, selectedStateVar, selectedMode, TIME_STEP)); + if (oneToOneMapping === 'True') { + isOneToOneMapping = true; + } + activityMin = parseFloat(minActivity); + activityMax = parseFloat(maxActivity); + + NO_OF_MEASURE_POINTS = noOfMeasurePoints; + for (let i = 0; i < NO_OF_MEASURE_POINTS; i++) { + VS_selectedRegions.push(i); + } + + const canvas = document.getElementById(BRAIN_CANVAS_ID); + customInitGL(canvas); + initShaders(); + if (urlVerticesList) { + brainBuffers = initBuffers($.parseJSON(urlVerticesList), $.parseJSON(urlNormalsList), $.parseJSON(urlTrianglesList), + $.parseJSON(urlRegionMapList), false); + } + + ColSch_initColorSchemeComponent(activityMin, activityMax); + LEG_initMinMax(activityMin, activityMax); + LEG_generateLegendBuffers(); + + VB_BrainNavigator = new NAV_BrainNavigator(isOneToOneMapping, brainBuffers, measurePoints, measurePointsLabels); + regionBoundariesController = new RB_RegionBoundariesController(boundaryURL); + + // Enable keyboard and mouse interaction + canvas.onkeydown = GL_handleKeyDown; + canvas.onkeyup = GL_handleKeyUp; + canvas.onmousedown = customMouseDown; + canvas.oncontextmenu = function () { + return false; + }; + $(document).on('mousemove', GL_handleMouseMove); + $(document).on('mouseup', customMouseUp); + // We use drawScene instead of tick because tick's performance is worse. + // Portlet previews are static, not movies. Tick's movie update is not required. + // A call to updateColors has to be made to initialize the color buffer. 
+ updateColors(0); + setInterval(drawScene, TICK_STEP); +} + +function _VS_static_entrypoint(urlVerticesList, urlLinesList, urlTrianglesList, urlNormalsList, urlMeasurePoints, + noOfMeasurePoints, urlRegionMapList, urlMeasurePointsLabels, boundaryURL, shelfObject, + hemisphereChunkMask, argDisplayMeasureNodes, argIsFaceToDisplay, + minMeasure, maxMeasure, urlMeasure) { + // initialize global configuration + isDoubleView = false; + isOneToOneMapping = false; + shouldIncrementTime = false; + AG_isStopped = true; + displayMeasureNodes = argDisplayMeasureNodes; + isFaceToDisplay = argIsFaceToDisplay; // this could be retrieved from the dom like drawNavigator + // make checkbox consistent with this flag + $("#displayFaceChkId").attr('checked', isFaceToDisplay); + drawNavigator = $("#showNavigator").prop('checked'); + + if (noOfMeasurePoints === 0) { + // we are viewing a surface with no region mapping + // we mock 1 measure point + measurePoints = [[0, 0, 0]]; + measurePointsLabels = ['']; + NO_OF_MEASURE_POINTS = 1; + // mock one activity frame + activityMin = 0; + activityMax = 1; + activitiesData = [[0]]; + } else { + _initMeasurePoints(noOfMeasurePoints, urlMeasurePoints, urlMeasurePointsLabels); + activityMin = parseFloat(minMeasure); + activityMax = parseFloat(maxMeasure); + let measure; + if (urlMeasure === '') { + // Empty url => The static viewer has to show a region map. + // The measure will be a range(NO_OF_MEASURE_POINTS) + measure = []; + for (let i = 0; i < NO_OF_MEASURE_POINTS; i++) { + measure.push(i); + } + } else { + measure = HLPR_readJSONfromFile(urlMeasure); + } + // The activity data will contain just one frame containing the values of the connectivity measure. 
+ activitiesData = [measure]; + } + + VS_showLegend = false; + if (parseFloat(minMeasure) < parseFloat(maxMeasure)) { + const brainLegendDiv = document.getElementById('brainLegendDiv'); + ColSch_updateLegendLabels(brainLegendDiv, minMeasure, maxMeasure, "100%"); + VS_showLegend = true; + } + + for (let i = 0; i < NO_OF_MEASURE_POINTS; i++) { + VS_selectedRegions.push(i); + } + + const canvas = document.getElementById(BRAIN_CANVAS_ID); + _initViewerGL(canvas, urlVerticesList, urlNormalsList, urlTrianglesList, + urlRegionMapList, urlLinesList, boundaryURL, shelfObject, hemisphereChunkMask); + + _bindEvents(canvas); + + //specify the re-draw function. + if (_isValidActivityData()) { + setInterval(tick, TICK_STEP); + } +} + +function _VS_movie_entrypoint(baseDatatypeURL, onePageSize, urlTimeList, urlVerticesList, urlLinesList, + urlTrianglesList, urlNormalsList, urlMeasurePoints, noOfMeasurePoints, + urlRegionMapList, minActivity, maxActivity, + oneToOneMapping, doubleView, shelfObject, hemisphereChunkMask, urlMeasurePointsLabels, boundaryURL) { + // initialize global configuration + isDoubleView = doubleView; + if (oneToOneMapping === 'True') { + isOneToOneMapping = true; + } + // these global flags could be structured better + isEEGView = isDoubleView && !isInternalSensorView; + activityMin = parseFloat(minActivity); + activityMax = parseFloat(maxActivity); + pageSize = onePageSize; + urlBase = baseDatatypeURL; + + // initialize global data + _initMeasurePoints(noOfMeasurePoints, urlMeasurePoints, urlMeasurePointsLabels); + _initTimeData(urlTimeList); + initActivityData(); + + if (isDoubleView) { + $("#displayFaceChkId").trigger('click'); + } + drawNavigator = $("#showNavigator").prop('checked'); + + const canvas = document.getElementById(BRAIN_CANVAS_ID); + + _initViewerGL(canvas, urlVerticesList, urlNormalsList, urlTrianglesList, + urlRegionMapList, urlLinesList, boundaryURL, shelfObject, hemisphereChunkMask); + + _bindEvents(canvas); + + _initSliders(); + + 
//specify the re-draw function. + if (_isValidActivityData()) { + setInterval(tick, TICK_STEP); + } +} + + +function _VS_init_cubicalMeasurePoints() { + for (let i = 0; i < NO_OF_MEASURE_POINTS; i++) { + const result = HLPR_sphereBufferAtPoint(gl, measurePoints[i], 3);//3 for the default radius value now, we will modify it later + const bufferVertices = result[0]; + const bufferNormals = result[1]; + const bufferTriangles = result[2]; + const bufferColor = createColorBufferForCube(false); + measurePointsBuffers[i] = [bufferVertices, bufferNormals, bufferTriangles, bufferColor]; + } +} + + +function VS_StartSurfaceViewer(urlVerticesList, urlLinesList, urlTrianglesList, urlNormalsList, urlMeasurePoints, + noOfMeasurePoints, urlRegionMapList, urlMeasurePointsLabels, + boundaryURL, shelveObject, minMeasure, maxMeasure, urlMeasure, hemisphereChunkMask) { + + _VS_static_entrypoint(urlVerticesList, urlLinesList, urlTrianglesList, urlNormalsList, urlMeasurePoints, + noOfMeasurePoints, urlRegionMapList, urlMeasurePointsLabels, boundaryURL, shelveObject, + hemisphereChunkMask, false, false, minMeasure, maxMeasure, urlMeasure); + _VS_init_cubicalMeasurePoints(); +} + +function VS_StartEEGSensorViewer(urlVerticesList, urlLinesList, urlTrianglesList, urlNormalsList, urlMeasurePoints, + noOfMeasurePoints, urlMeasurePointsLabels, + shelfObject, minMeasure, maxMeasure, urlMeasure) { + isEEGView = true; + _VS_static_entrypoint(urlVerticesList, urlLinesList, urlTrianglesList, urlNormalsList, urlMeasurePoints, + noOfMeasurePoints, '', urlMeasurePointsLabels, '', shelfObject, null, true, true, + minMeasure, maxMeasure, urlMeasure); + _VS_init_cubicalMeasurePoints(); +} + +function VS_StartBrainActivityViewer(baseDatatypeURL, onePageSize, urlTimeList, urlVerticesList, urlLinesList, + urlTrianglesList, urlNormalsList, urlMeasurePoints, noOfMeasurePoints, + urlRegionMapList, minActivity, maxActivity, + oneToOneMapping, doubleView, shelfObject, hemisphereChunkMask, + 
urlMeasurePointsLabels, boundaryURL, measurePointsSelectionGID, transparencyStatus) { + _VS_movie_entrypoint(baseDatatypeURL, onePageSize, urlTimeList, urlVerticesList, urlLinesList, + urlTrianglesList, urlNormalsList, urlMeasurePoints, noOfMeasurePoints, + urlRegionMapList, minActivity, maxActivity, + oneToOneMapping, doubleView, shelfObject, hemisphereChunkMask, + urlMeasurePointsLabels, boundaryURL); + _VS_init_cubicalMeasurePoints(); + + if (!isDoubleView) { + // If this is a brain activity viewer then we have to initialize the selection component + _initChannelSelection(measurePointsSelectionGID); + // For the double view the selection is the responsibility of the extended view functions + } + withTransparency = transparencyStatus; +} + +function _isValidActivityData() { + if (isOneToOneMapping) { + if (activitiesData.length !== brainBuffers.length) { + displayMessage("The number of activity buffers should equal the number of split surface slices", "errorMessage"); + return false; + } + if (3 * activitiesData[0][0].length !== brainBuffers[0][0].numItems) { + displayMessage("The number of activity points should equal the number of surface vertices", "errorMessage"); + return false; + } + } else { + if (NO_OF_MEASURE_POINTS !== activitiesData[0].length) { + displayMessage("The number of activity points should equal the number of regions", "errorMessage"); + return false; + } + } + return true; +} + +/** + * Scene setup common to all webgl brain viewers + */ +function _initViewerGL(canvas, urlVerticesList, urlNormalsList, urlTrianglesList, + urlRegionMapList, urlLinesList, boundaryURL, shelfObject, hemisphere_chunk_mask) { + customInitGL(canvas); + GL_initColorPickFrameBuffer(); + initShaders(); + + if (VS_showLegend) { + LEG_initMinMax(activityMin, activityMax); + ColSch_initColorSchemeGUI(activityMin, activityMax, LEG_updateLegendColors); + LEG_generateLegendBuffers(); + } else { + ColSch_initColorSchemeGUI(activityMin, activityMax); + } + + if 
(urlVerticesList) { + let parsedIndices = []; + if (urlRegionMapList) { + parsedIndices = $.parseJSON(urlRegionMapList); + } + brainBuffers = initBuffers($.parseJSON(urlVerticesList), $.parseJSON(urlNormalsList), + $.parseJSON(urlTrianglesList), parsedIndices, isDoubleView); + } + + VS_init_hemisphere_mask(hemisphere_chunk_mask); + + brainLinesBuffers = HLPR_getDataBuffers(gl, $.parseJSON(urlLinesList), isDoubleView, true); + regionBoundariesController = new RB_RegionBoundariesController(boundaryURL); + + if (shelfObject) { + shelfObject = $.parseJSON(shelfObject); + shelfBuffers = initBuffers(shelfObject[0], shelfObject[1], shelfObject[2], false, true); + } + + VB_BrainNavigator = new NAV_BrainNavigator(isOneToOneMapping, brainBuffers, measurePoints, measurePointsLabels); +} + + +function _bindEvents(canvas) { + // Enable keyboard and mouse interaction + canvas.onkeydown = GL_handleKeyDown; + canvas.onkeyup = GL_handleKeyUp; + canvas.onmousedown = customMouseDown; + $(document).on('mouseup', customMouseUp); + $(canvas).on('contextmenu', _onContextMenu); + $(document).on('mousemove', GL_handleMouseMove); + + $(canvas).mousewheel(function (event, delta) { + GL_handleMouseWeel(delta); + return false; // prevent default + }); + + if (!isDoubleView) { + const canvasX = document.getElementById('brain-x'); + if (canvasX) { + canvasX.onmousedown = function (event) { + VB_BrainNavigator.moveInXSection(event) + }; + } + const canvasY = document.getElementById('brain-y'); + if (canvasY) { + canvasY.onmousedown = function (event) { + VB_BrainNavigator.moveInYSection(event) + }; + } + const canvasZ = document.getElementById('brain-z'); + if (canvasZ) { + canvasZ.onmousedown = function (event) { + VB_BrainNavigator.moveInZSection(event) + }; + } + } +} + +function _initMeasurePoints(noOfMeasurePoints, urlMeasurePoints, urlMeasurePointsLabels) { + if (noOfMeasurePoints > 0) { + measurePoints = HLPR_readJSONfromFile(urlMeasurePoints); + measurePointsLabels = 
HLPR_readJSONfromFile(urlMeasurePointsLabels); + NO_OF_MEASURE_POINTS = measurePoints.length; + } else { + NO_OF_MEASURE_POINTS = 0; + measurePoints = []; + measurePointsLabels = []; + } +} + +function _initTimeData(urlTimeList) { + const timeUrls = $.parseJSON(urlTimeList); + for (let i = 0; i < timeUrls.length; i++) { + timeData = timeData.concat(HLPR_readJSONfromFile(timeUrls[i])); + } + MAX_TIME = timeData.length - 1; +} + +function _updateSpeedSliderValue(stepsPerTick) { + let s; + if (stepsPerTick >= 1) { + s = stepsPerTick.toFixed(0); + } else { + s = "1/" + (1 / stepsPerTick).toFixed(0); + } + $("#slider-value").html(s); +} + +function _initSliders() { + const maxAllowedTimeStep = Math.ceil(MAX_TIME / ACTIVITY_FRAMES_IN_TIME_LINE_AT_MAX_SPEED); + // after being converted to the exponential range maxSpeed must not exceed maxAllowedTimeStep + const maxSpeedSlider = Math.min(10, 5 + Math.log(maxAllowedTimeStep) / Math.LN2); + + if (timeData.length > 0) { + $("#sliderStep").slider({ + min: 0, max: maxSpeedSlider, step: 1, value: 5, + stop: function () { + refreshCurrentDataSlice(); + sliderSel = false; + }, + slide: function (event, target) { + // convert the linear 0..10 range to the exponential 1/32..1..32 range + const newStep = Math.pow(2, target.value - 5); + setTimeStep(newStep); + _updateSpeedSliderValue(timeStepsPerTick); + sliderSel = true; + } + }); + // Initialize slider for timeLine + $("#slider").slider({ + min: 0, max: MAX_TIME, + slide: function (event, target) { + sliderSel = true; + currentTimeValue = target.value; + $('#TimeNow').val(currentTimeValue); + }, + stop: function (event, target) { + sliderSel = false; + loadFromTimeStep(target.value); + } + }); + } else { + $("#divForSliderSpeed").hide(); + } + _updateSpeedSliderValue(timeStepsPerTick); + + $('#TimeNow').click(function () { + if (!AG_isStopped) { + pauseMovie(); + } + $(this).select(); + }).change(function (ev) { + let val = parseFloat(ev.target.value); + if (val === null || val < 0 
|| val > MAX_TIME) { + val = 0; + ev.target.value = 0; + } + $('#slider').slider('value', val); + loadFromTimeStep(val); + }); +} + +function _initChannelSelection(selectionGID) { + const vs_regionsSelector = TVBUI.regionSelector("#channelSelector", {filterGid: selectionGID}); + + vs_regionsSelector.change(function (value) { + VS_selectedRegions = []; + for (let i = 0; i < value.length; i++) { + VS_selectedRegions.push(parseInt(value[i], 10)); + } + }); + //sync region filter with initial selection + VS_selectedRegions = []; + const selection = vs_regionsSelector.val(); + for (let i = 0; i < selection.length; i++) { + VS_selectedRegions.push(parseInt(selection[i], 10)); + } + const mode_selector = TVBUI.modeAndStateSelector("#channelSelector", 0); + mode_selector.modeChanged(VS_changeMode); + mode_selector.stateVariableChanged(VS_changeStateVariable); +} + +////////////////////////////////////////// GL Initializations ////////////////////////////////////////// + +function customInitGL(canvas) { + window.onresize = function () { + updateGLCanvasSize(BRAIN_CANVAS_ID); + LEG_updateLegendVerticesBuffers(); + }; + initGL(canvas); + drawingMode = gl.TRIANGLES; + gl.newCanvasWidth = canvas.clientWidth; + gl.newCanvasHeight = canvas.clientHeight; + canvas.redrawFunctionRef = drawScene; // interface-like function used in HiRes image exporting + canvas.multipleImageExport = VS_multipleImageExport; + + gl.clearDepth(1.0); + gl.enable(gl.DEPTH_TEST); + gl.depthFunc(gl.LEQUAL); +} + +/** This callback handles image exporting from this canvas.*/ +function VS_multipleImageExport(saveFigure) { + const canvas = this; + + function saveFrontBack(nameFront, nameBack) { + mvPushMatrix(); + // front + canvas.drawForImageExport(); + saveFigure({suggestedName: nameFront}); + // back: rotate model around the vertical y axis in trackball space (almost camera space: camera has a z translation) + const r = createRotationMatrix(180, [0, 1, 0]); + GL_mvMatrix = 
GL_cameraMatrix.x(r.x(GL_trackBallMatrix)); + canvas.drawForImageExport(); + saveFigure({suggestedName: nameBack}); + mvPopMatrix(); + } + + // using drawForImageExport because it handles resizing canvas for export + // It is set on canvas in initGL and defers to drawscene. + + if (VS_hemisphere_chunk_mask !== null) { // we have 2 hemispheres + if (VS_hemisphereVisibility === null) { // both are visible => take them apart when taking picture + VS_SetHemisphere('l'); + saveFrontBack('brain-LH-front', 'brain-LH-back'); + VS_SetHemisphere('r'); + saveFrontBack('brain-RH-front', 'brain-RH-back'); + VS_SetHemisphere(VS_hemisphereVisibility); + } else if (VS_hemisphereVisibility === 'l') { // LH is visible => take picture of it only + saveFrontBack('brain-LH-front', 'brain-LH-back'); + } else if (VS_hemisphereVisibility === 'r') { + saveFrontBack('brain-RH-front', 'brain-RH-back'); + } + } else { + // just save front-back view if no hemispheres + saveFrontBack('brain-front', 'brain-back'); + } +} + +function initShaders() { + createAndUseShader("shader-fs", "shader-vs"); + if (isOneToOneMapping) { + SHADING_Context.one_to_one_program_init(GL_shaderProgram); + } else { + SHADING_Context.region_progam_init(GL_shaderProgram, NO_OF_MEASURE_POINTS, legendGranularity); + } +} + +///////////////////////////////////////~~~~~~~~START MOUSE RELATED CODE~~~~~~~~~~~////////////////////////////////// + + +function _onContextMenu() { + if (!displayMeasureNodes || VS_pickedIndex === -1) { + return false; + } + doPick = true; + drawScene(); + $('#nodeNameId').text(measurePointsLabels[VS_pickedIndex]); + $('#contextMenuDiv').css('left', event.offsetX).css('top', event.offsetY).show(); + return false; +} + +var doPick = false; + +function customMouseDown(event) { + GL_handleMouseDown(event, $("#" + BRAIN_CANVAS_ID)); + $('#contextMenuDiv').hide(); + VB_BrainNavigator.temporaryDisableInTimeRefresh(); + if (displayMeasureNodes) { + doPick = true; + } + +} + +function customMouseUp(event) { 
+ GL_handleMouseUp(event); + VB_BrainNavigator.endTemporaryDisableInTimeRefresh(); +} + +/////////////////////////////////////////~~~~~~~~END MOUSE RELATED CODE~~~~~~~~~~~////////////////////////////////// + + +////////////////////////////////////////~~~~~~~~~ WEB GL RELATED RENDERING ~~~~~~~///////////////////////////////// +/** + * Update colors for all Positions on the brain. + */ + +function updateColors(currentTimeInFrame) { + const col = ColSchInfo(); + const activityRange = ColSchGetBounds(); + SHADING_Context.colorscheme_set_uniforms(GL_shaderProgram, activityRange.min, activityRange.max, + activityRange.bins, activityRange.centralHoleDiameter); + + if (isOneToOneMapping) { + for (let i = 0; i < brainBuffers.length; i++) { + const activity = new Float32Array(activitiesData[i][currentTimeInFrame]); + gl.bindBuffer(gl.ARRAY_BUFFER, brainBuffers[i][3]); + gl.bufferData(gl.ARRAY_BUFFER, activity, gl.STATIC_DRAW); + gl.uniform1f(GL_shaderProgram.colorSchemeUniform, col.tex_v); + } + } else { + const currentActivity = activitiesData[currentTimeInFrame]; + for (let ii = 0; ii < NO_OF_MEASURE_POINTS; ii++) { + if (VS_selectedRegions.indexOf(ii) !== -1) { + gl.uniform2f(GL_shaderProgram.activityUniform[ii], currentActivity[ii], col.tex_v); + } else { + gl.uniform2f(GL_shaderProgram.activityUniform[ii], currentActivity[ii], col.muted_tex_v); + } + } + // default color for a measure point + gl.uniform2f(GL_shaderProgram.activityUniform[NO_OF_MEASURE_POINTS], activityMin, col.measurePoints_tex_v); + // color used for a picked measure point + gl.uniform2f(GL_shaderProgram.activityUniform[NO_OF_MEASURE_POINTS + 1], activityMax, col.measurePoints_tex_v); + } +} + +function toggleMeasureNodes() { + displayMeasureNodes = !displayMeasureNodes; +} + + +function switchFaceObject() { + isFaceToDisplay = !isFaceToDisplay; +} + +/** + * Draw model with filled Triangles of isolated Points (Vertices). 
+ */ +function wireFrame() { + if (drawingMode === gl.POINTS) { + drawingMode = gl.TRIANGLES; + } else { + drawingMode = gl.POINTS; + } +} + +/** + * Sets a new movie speed. + * To stop the movie set AG_isStopped to true rather than passing 0 here. + */ +function setTimeStep(newTimeStepsPerTick) { + timeStepsPerTick = newTimeStepsPerTick; + if (timeStepsPerTick < 1) { // subunit speed + TIME_STEP = 1; + } else { + TIME_STEP = Math.floor(timeStepsPerTick); + } +} + +function resetSpeedSlider() { + setTimeStep(1); + $("#sliderStep").slider("option", "value", 1); + refreshCurrentDataSlice(); +} + +function setNavigatorVisibility(enable) { + drawNavigator = enable; +} + +function toggleDrawTriangleLines() { + drawTriangleLines = !drawTriangleLines; +} + +function toggleDrawBoundaries() { + regionBoundariesController.toggleBoundariesVisibility(); +} + +function setSpecularHighLights(enable) { + drawSpeculars = enable; +} + +/** + * Creates a list of webGl buffers. + * + * @param dataList a list of lists. Each list will contain the data needed for creating a gl buffer. + */ +function createWebGlBuffers(dataList) { + const result = []; + for (let i = 0; i < dataList.length; i++) { + const buffer = gl.createBuffer(); + gl.bindBuffer(gl.ARRAY_BUFFER, buffer); + gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(dataList[i]), gl.STATIC_DRAW); + buffer.numItems = dataList[i].length; + result.push(buffer); + } + + return result; +} + +/** + * Read data from the specified urls. 
+ * + * @param data_url_list a list of urls from where it should read the data + * @param staticFiles true if the urls points to some static files + */ +function readFloatData(data_url_list, staticFiles) { + const result = []; + for (let i = 0; i < data_url_list.length; i++) { + let data_json = HLPR_readJSONfromFile(data_url_list[i], staticFiles); + if (staticFiles) { + for (let j = 0; j < data_json.length; j++) { + data_json[j] = parseFloat(data_json[j]); + } + } + result.push(data_json); + data_json = null; + } + return result; +} + +/** + * Computes the data for alpha and alphasIndices. + * + * @param vertices a list which contains lists of vertices. E.g.: [[slice_1_vertices],...,[slice_n_vertices]] + * @param measurePoints a list which contains all the measure points. E.g.: [[x0,y0,z0],[x1,y1,z1],...] + */ +function computeVertexRegionMap(vertices, measurePoints) { + const vertexRegionMap = []; + for (let i = 0; i < vertices.length; i++) { + const reg = []; + for (let j = 0; j < vertices[i].length / 3; j++) { + const currentVertex = vertices[i].slice(j * 3, (j + 1) * 3); + const closestPosition = NAV_BrainNavigator.findClosestPosition(currentVertex, measurePoints); + reg.push(closestPosition); + } + vertexRegionMap.push(reg); + } + return vertexRegionMap; +} + + +/** + * Method used for creating a color buffer for a cube (measure point). + * + * @param isPicked If true then the color used will be + * the one used for drawing the measure points for which the + * corresponding eeg channels are selected. 
+ */ +function createColorBufferForCube(isPicked) { + let pointColor = []; + if (isOneToOneMapping) { + pointColor = [0.34, 0.95, 0.37, 1.0]; + if (isPicked) { + pointColor = [0.99, 0.99, 0.0, 1.0]; + } + } else { + pointColor = [NO_OF_MEASURE_POINTS]; + if (isPicked) { + pointColor = [NO_OF_MEASURE_POINTS + 1]; + } + } + let colors = []; + for (let i = 0; i < 24; i++) { + colors = colors.concat(pointColor); + } + const cubeColorBuffer = gl.createBuffer(); + gl.bindBuffer(gl.ARRAY_BUFFER, cubeColorBuffer); + gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(colors), gl.STATIC_DRAW); + return cubeColorBuffer; +} + +function initBuffers(urlVertices, urlNormals, urlTriangles, urlRegionMap, staticFiles) { + const verticesData = readFloatData(urlVertices, staticFiles); + const vertexBatches = createWebGlBuffers(verticesData); + const normals = HLPR_getDataBuffers(gl, urlNormals, staticFiles); + const indexes = HLPR_getDataBuffers(gl, urlTriangles, staticFiles, true); + + let vertexRegionMap; + if (!isOneToOneMapping) { + if (urlRegionMap && urlRegionMap.length) { + vertexRegionMap = HLPR_getDataBuffers(gl, urlRegionMap); + } else if (isEEGView) { + // if is eeg view than we use the static surface 'eeg_skin_surface' and we have to compute the vertexRegionMap; + // todo: do this on the server to eliminate this special case + const regionData = computeVertexRegionMap(verticesData, measurePoints); + vertexRegionMap = createWebGlBuffers(regionData); + } else { + // Fake buffers, copy of the normals, in case of transparency, we only need dummy ones. 
+ vertexRegionMap = normals; + } + } + + const result = []; + for (let i = 0; i < vertexBatches.length; i++) { + if (isOneToOneMapping) { + const activityBuffer = gl.createBuffer(); + gl.bindBuffer(gl.ARRAY_BUFFER, activityBuffer); + gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertexBatches[i].numItems), gl.STATIC_DRAW); + result.push([vertexBatches[i], normals[i], indexes[i], activityBuffer]); + } else { + result.push([vertexBatches[i], normals[i], indexes[i], vertexRegionMap[i]]); + } + } + return result; +} + +/** + * Make a draw call towards the GL_shaderProgram compiled from common/vertex_shader common_fragment_shader + * Note: all attributes have to be bound even if the shader does not explicitly use them (ex picking mode) + * @param drawMode Triangles / Points + * @param buffers Buffers to be drawn. Array of (vertices, normals, triangles, colors) for one to one mappings + * Array of (vertices, normals, triangles, alphas, alphaindices) for region based drawing + */ +function drawBuffer(drawMode, buffers) { + setMatrixUniforms(); + if (isOneToOneMapping) { + SHADING_Context.one_to_one_program_draw(GL_shaderProgram, buffers[0], buffers[1], buffers[3], buffers[2], drawMode); + } else if (isDrawingSpheres) { + SHADING_Context.region_program_draw(GL_shaderProgram, buffers[0], buffers[1], buffers[0], buffers[2], drawMode); + } else { + SHADING_Context.region_program_draw(GL_shaderProgram, buffers[0], buffers[1], buffers[3], buffers[2], drawMode); + } + + +} + +/** + * + * @param drawMode Triangles / Points + * @param buffersSets Actual buffers to be drawn. Array or (vertices, normals, triangles) + * @param [bufferSetsMask] Optional. If this array has a 0 at index i then the buffer at index i is not drawn + * @param [useBlending] When true, the object is drawn with blending (for transparency) + * @param [cullFace] When gl.FRONT, it will mark current object to be drown twice (another with gl.BACK). 
+ * It should be set to GL.FRONT for objects transparent and convex. + */ +function drawBuffers(drawMode, buffersSets, bufferSetsMask, useBlending, cullFace) { + let lightSettings = null; + if (useBlending) { + lightSettings = setLighting(blendingLightSettings); + gl.enable(gl.BLEND); + gl.blendEquationSeparate(gl.FUNC_ADD, gl.FUNC_ADD); + gl.blendFuncSeparate(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA, gl.ONE, gl.ONE_MINUS_SRC_ALPHA); + // Blending function for alpha: transparent pix blended over opaque -> opaque pix + if (cullFace) { + gl.enable(gl.CULL_FACE); + if (withTransparency) { + gl.uniform1f(GL_shaderProgram.alphaUniform, _alphaValue); + } + else { + gl.uniform1f(GL_shaderProgram.alphaUniform, 1); + } + gl.cullFace(cullFace); + } + } + + for (let i = 0; i < buffersSets.length; i++) { + if (bufferSetsMask !== null && bufferSetsMask !== undefined && !bufferSetsMask[i]) { + continue; + } + + //display spheres with full alpha value + if (isDrawingSpheres) { + gl.uniform1i(GL_shaderProgram.useVertexColors, false); + gl.uniform1f(GL_shaderProgram.alphaUniform, 1); + + // set sphere color green for the picked ones and yellow for the others + if (i == VS_pickedIndex) { + gl.uniform4f(GL_shaderProgram.materialColor, 0.34, 0.95, 0.37, 1.0); + drawBuffer(drawMode, buffersSets[i]); + } + else { + gl.uniform4f(GL_shaderProgram.materialColor, 0.99, 0.99, 0.0, 1.0); + drawBuffer(drawMode, buffersSets[i]); + } + gl.uniform1i(GL_shaderProgram.useVertexColors, true); + + if (withTransparency) { + gl.uniform1f(GL_shaderProgram.alphaUniform, _alphaValue); + } + } + else { + drawBuffer(drawMode, buffersSets[i]); + + } + + } + + if (useBlending) { + gl.disable(gl.CULL_FACE); + setLighting(lightSettings); + + // Draw the same transparent object the second time + if (cullFace === gl.FRONT) { + drawBuffers(drawMode, buffersSets, bufferSetsMask, useBlending, gl.BACK); + } + + } +} + + +function drawBrainLines(linesBuffers, brainBuffers, bufferSetsMask) { + let lightSettings = null; + 
if (drawingMode !== gl.POINTS) { + // Usually draw the wire-frame with the same color. But in points mode draw with the vertex colors. + lightSettings = setLighting(linesLightSettings); + } + gl.lineWidth(1.0); + // we want all the brain buffers in this set except the element array buffer (at index 2) + let bufferSets = []; + for (let c = 0; c < brainBuffers.length; c++) { + let chunk = brainBuffers[c].slice(); + chunk[2] = linesBuffers[c]; + bufferSets.push(chunk); + } + drawBuffers(gl.LINES, bufferSets, bufferSetsMask); + if (drawingMode !== gl.POINTS) { + setLighting(lightSettings); + } +} + +/** + * Actual scene drawing step. + */ +function tick() { + + if (sliderSel) { + return; + } + + //// Update activity buffers to be drawn at next step + // If we are in the middle of waiting for the next data file just + // stop and wait since we might have an index that is 'out' of this data slice + if (!AG_isStopped) { + // Synchronizes display time with movie time + let shouldStep = false; + if (timeStepsPerTick >= 1) { + shouldStep = true; + } else if (elapsedTicksPerTimeStep >= (1 / timeStepsPerTick)) { + shouldStep = true; + elapsedTicksPerTimeStep = 0; + } else { + elapsedTicksPerTimeStep += 1; + } + + if (shouldStep && shouldIncrementTime) { + currentTimeValue = currentTimeValue + TIME_STEP; + } + + if (currentTimeValue > MAX_TIME) { + // Next time value is no longer in activity data. 
+ initActivityData(); + if (isDoubleView) { + loadEEGChartFromTimeStep(0); + drawGraph(false, 0); + } + shouldStep = false; + } + + if (shouldStep) { + if (shouldLoadNextActivitiesFile()) { + loadNextActivitiesFile(); + } + if (shouldChangeCurrentActivitiesFile()) { + changeCurrentActivitiesFile(); + } + if (isDoubleView) { + drawGraph(true, TIME_STEP); + } + } + } + + const currentTimeInFrame = Math.floor((currentTimeValue - totalPassedActivitiesData) / TIME_STEP); + updateColors(currentTimeInFrame); + + drawScene(); + + /// Update FPS and Movie timeline + if (!isPreview) { + const timeNow = new Date().getTime(); + const elapsed = timeNow - lastTime; + + if (lastTime !== 0) { + framestime.shift(); + framestime.push(elapsed); + } + + lastTime = timeNow; + if (timeData.length > 0 && !AG_isStopped) { + document.getElementById("TimeNow").value = toSignificantDigits(timeData[currentTimeValue], 2); + } + let meanFrameTime = 0; + for (let i = 0; i < framestime.length; i++) { + meanFrameTime += framestime[i]; + } + meanFrameTime = meanFrameTime / framestime.length; + document.getElementById("FramesPerSecond").innerHTML = Math.floor(1000 / meanFrameTime).toFixed(); + if (!sliderSel && !AG_isStopped) { + $("#slider").slider("option", "value", currentTimeValue); + } + } +} + +/** + * Draw from buffers. 
+ */ +function drawScene() { + + const theme = ColSchGetTheme().surfaceViewer; + gl.clearColor(theme.backgroundColor[0], theme.backgroundColor[1], theme.backgroundColor[2], theme.backgroundColor[3]); + gl.viewport(0, 0, gl.viewportWidth, gl.viewportHeight); + + // Draw sections before setting the correct draw perspective, to work with "rel-time refresh of sections" + VB_BrainNavigator.maybeRefreshSections(); + + // View angle is 45, we want to see object from near up to 800 distance from camera + perspective(45, gl.viewportWidth / gl.viewportHeight, near, 800.0); + + mvPushMatrix(); + mvRotate(180, [0, 0, 1]); + + if (!doPick) { + gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT); + + if (drawSpeculars) { + setLighting(specularLightSettings); + } else { + setLighting(); + } + + if (VS_showLegend) { + mvPushMatrix(); + loadIdentity(); + drawBuffers(gl.TRIANGLES, [LEG_legendBuffers]); + mvPopMatrix(); + } + + if (isInternalSensorView) { + gl.uniform1f(GL_shaderProgram.alphaUniform, 1); + drawBuffers(gl.TRIANGLES, measurePointsBuffers); + } else { + //draw the nodes first to make it appear + if (displayMeasureNodes) { + isDrawingSpheres = true; + + + drawBuffers(gl.TRIANGLES, measurePointsBuffers); + isDrawingSpheres = false; + + } + + // draw surface + drawBuffers(drawingMode, brainBuffers, bufferSetsMask); + + regionBoundariesController.drawRegionBoundaries(drawingMode, brainBuffers); + + if (drawTriangleLines) { + drawBrainLines(brainLinesBuffers, brainBuffers, bufferSetsMask); + } + + } + + if (isFaceToDisplay) { + const faceDrawMode = isInternalSensorView ? 
drawingMode : gl.TRIANGLES; + mvPushMatrix(); + mvTranslate(VB_BrainNavigator.getPosition()); + drawBuffers(faceDrawMode, shelfBuffers, null, true, gl.FRONT); + mvPopMatrix(); + } + + if (drawNavigator) { + VB_BrainNavigator.drawNavigator(); + } + + + } else { + gl.bindFramebuffer(gl.FRAMEBUFFER, GL_colorPickerBuffer); + gl.disable(gl.DITHER); + gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT); + setLighting(pickingLightSettings); + + if (GL_colorPickerInitColors.length === 0) { + GL_initColorPickingData(NO_OF_MEASURE_POINTS); + } + + + isDrawingSpheres = true; + for (let i = 0; i < NO_OF_MEASURE_POINTS; i++) { + const mpColor = GL_colorPickerInitColors[i]; + gl.uniform4fv(GL_shaderProgram.materialColor, mpColor); + drawBuffer(gl.TRIANGLES, measurePointsBuffers[i]); + } + isDrawingSpheres = false; + + VS_pickedIndex = GL_getPickedIndex(); + //display the channel name + if (VS_pickedIndex != -1) { + displayMessage("The highlighted node is " + measurePointsLabels[VS_pickedIndex], "infoMessage") + + } + doPick = false; + gl.bindFramebuffer(gl.FRAMEBUFFER, null); + } + + mvPopMatrix(); +} + +////////////////////////////////////////~~~~~~~~~ END WEB GL RELATED RENDERING ~~~~~~~///////////////////////////////// + + +/////////////////////////////////////// ~~~~~~~~~~ DATA RELATED METHOD ~~~~~~~~~~~~~ ////////////////////////////////// + +/** + * Change the currently selected state variable. Get the newly selected value, reset the currentTimeValue to start + * and read the first page of the new mode/state var combination. + */ +function VS_changeStateVariable(id, val) { + selectedStateVar = val; + $("#slider").slider("option", "value", currentTimeValue); + initActivityData(); +} + +/** + * Change the currently selected mode. Get the newly selected value, reset the currentTimeValue to start + * and read the first page of the new mode/state var combination. 
+ */ +function VS_changeMode(id, val) { + selectedMode = val; + $("#slider").slider("option", "value", currentTimeValue); + initActivityData(); +} + +/** + * Just read the first slice of activity data and set the time step to 0. + */ +function initActivityData() { + currentTimeValue = 0; + //read the first file + const initUrl = getUrlForPageFromIndex(0); + activitiesData = HLPR_readJSONfromFile(initUrl); + if (activitiesData !== null && activitiesData !== undefined) { + currentActivitiesFileLength = activitiesData.length * TIME_STEP; + totalPassedActivitiesData = 0; + } +} + +/** + * Load the brainviewer from this given time step. + */ +function loadFromTimeStep(step) { + showBlockerOverlay(50000); + if (step % TIME_STEP !== 0) { + step = step - step % TIME_STEP + TIME_STEP; // Set time to be multiple of step + } + const nextUrl = getUrlForPageFromIndex(step); + currentAsyncCall = null; + readFileData(nextUrl, false); + currentTimeValue = step; + activitiesData = nextActivitiesFileData.slice(0); + nextActivitiesFileData = null; + currentActivitiesFileLength = activitiesData.length * TIME_STEP; + totalPassedActivitiesData = currentTimeValue; + // Also sync eeg monitor if in double view + if (isDoubleView) { + loadEEGChartFromTimeStep(step); + } + closeBlockerOverlay(); +} + +/** + * Refresh the current data with the new time step. 
+ */ +function refreshCurrentDataSlice() { + if (currentTimeValue % TIME_STEP !== 0) { + currentTimeValue = currentTimeValue - currentTimeValue % TIME_STEP + TIME_STEP; // Set time to be multiple of step + } + loadFromTimeStep(currentTimeValue); +} + +/** + * Generate the url that reads one page of data starting from @param index + */ +function getUrlForPageFromIndex(index) { + let fromIdx = index; + if (fromIdx > MAX_TIME) { + fromIdx = 0; + } + const toIdx = fromIdx + pageSize * TIME_STEP; + return readDataSplitPageURL(urlBase, fromIdx, toIdx, selectedStateVar, selectedMode, TIME_STEP); +} + +/** + * If we are at the last NEXT_PAGE_THRESHOLD points of data we should start loading the next data file + * to get an animation as smooth as possible. + */ +function shouldLoadNextActivitiesFile() { + + if (!isPreview && (currentAsyncCall === null) && ((currentTimeValue - totalPassedActivitiesData + NEXT_PAGE_THREASHOLD * TIME_STEP) >= currentActivitiesFileLength)) { + if (nextActivitiesFileData === null || nextActivitiesFileData.length === 0) { + return true; + } + } + return false; +} + +/** + * Start a new async call that should load required data for the next activity slice. + */ +function loadNextActivitiesFile() { + const nextFileIndex = totalPassedActivitiesData + currentActivitiesFileLength; + const nextUrl = getUrlForPageFromIndex(nextFileIndex); + const asyncCallId = new Date().getTime(); + currentAsyncCall = asyncCallId; + readFileData(nextUrl, true, asyncCallId); +} + +/** + * If the next time value is bigger that the length of the current activity loaded data + * that means it's time to switch to the next activity data slice. + */ +function shouldChangeCurrentActivitiesFile() { + return ((currentTimeValue + TIME_STEP - totalPassedActivitiesData) >= currentActivitiesFileLength); +} + +/** + * We've reached the end of the current activity chunk. Time to switch to + * the next one. 
+ */ +function changeCurrentActivitiesFile() { + if (nextActivitiesFileData === null || !nextActivitiesFileData.length) { + // Async data call was not finished, stop incrementing call and wait for data. + shouldIncrementTime = false; + return; + } + + activitiesData = nextActivitiesFileData.slice(0); + nextActivitiesFileData = null; + totalPassedActivitiesData = totalPassedActivitiesData + currentActivitiesFileLength; + currentActivitiesFileLength = activitiesData.length * TIME_STEP; + currentAsyncCall = null; + if (activitiesData && activitiesData.length) { + shouldIncrementTime = true; + } + if (totalPassedActivitiesData >= MAX_TIME) { + totalPassedActivitiesData = 0; + } +} + + +function readFileData(fileUrl, async, callIdentifier) { + nextActivitiesFileData = null; + // Keep a call identifier so we don't "intersect" async calls when two + // async calls are started before the first one finishes. + const self = this; + self.callIdentifier = callIdentifier; + doAjaxCall({ + url: fileUrl, + async: async, + success: function (data) { + if ((self.callIdentifier === currentAsyncCall) || !async) { + nextActivitiesFileData = eval(data); + data = null; + } + } + }); +} + + +/////////////////////////////////////// ~~~~~~~~~~ END DATA RELATED METHOD ~~~~~~~~~~~~~ ////////////////////////////////// diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js index 2a009a61e..3fb2a8f45 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js @@ -33,6 +33,10 @@ /* global tv, d3 */ +//will store the interval and time selection range +var timeselection_interval=0; +var timeselection=[]; + tv = {}; tv.util = { diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/vertex_shader_opacity.html 
b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/vertex_shader_opacity.html new file mode 100644 index 000000000..2e3138436 --- /dev/null +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/vertex_shader_opacity.html @@ -0,0 +1,4 @@ + \ No newline at end of file From de5c56b85ec9399b20c130eff9e6d206925bab4c Mon Sep 17 00:00:00 2001 From: kimonoki Date: Thu, 7 Jun 2018 13:51:58 +1000 Subject: [PATCH 14/53] TVB-2369 Fix shader's directory --- .../visualizers/commons/shading/vertex_region_opacity.glsl | 2 +- .../visualizers/new_dual_brain/vertex_shader_opacity.html | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tvb/interfaces/web/templates/genshi/visualizers/commons/shading/vertex_region_opacity.glsl b/tvb/interfaces/web/templates/genshi/visualizers/commons/shading/vertex_region_opacity.glsl index f86af1d22..bf879bfbf 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/commons/shading/vertex_region_opacity.glsl +++ b/tvb/interfaces/web/templates/genshi/visualizers/commons/shading/vertex_region_opacity.glsl @@ -22,5 +22,5 @@ void main(void) { vec2 uv = uActivity[int(aVertexRegion)]; vColor = colorSchemeLookup(uv); - vColor.a=uAlpha; + vColor.a = uAlpha; } diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/vertex_shader_opacity.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/vertex_shader_opacity.html index 2e3138436..3993fcdfb 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/vertex_shader_opacity.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/vertex_shader_opacity.html @@ -1,4 +1,4 @@ \ No newline at end of file From 1a322c1db43c5b3876cf85eb0d4109d55081906a Mon Sep 17 00:00:00 2001 From: kimonoki Date: Thu, 7 Jun 2018 14:44:01 +1000 Subject: [PATCH 15/53] TVB-2370 Fix brush's styling --- .../static/style/subsection_timeseries.css | 26 +++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git 
a/tvb/interfaces/web/static/style/subsection_timeseries.css b/tvb/interfaces/web/static/style/subsection_timeseries.css index 5578e7ae7..b95e7c415 100644 --- a/tvb/interfaces/web/static/style/subsection_timeseries.css +++ b/tvb/interfaces/web/static/style/subsection_timeseries.css @@ -25,6 +25,32 @@ color: #000; } +/* !------------------------------------------------------- */ +/* ! COMPONENT: New dual brain viewer */ +/* !------------------------------------------------------- */ + +/*overlay changed in d3v4 for new dual brain viewer*/ +.overlay{ + visibility: hidden; +} + +/*universal html font styling not working while using d3v4 in the new dual brain viewer*/ +text{ + font-size:small; +} + +/*d3v4 extent=>selection*/ +rect.selection { + stroke: red; + +} + +/*remove the styling for the lefright handle*/ +rect.handle.handle--e, rect.handle.handle--w { + stroke:none !important; +} + + /* !------------------------------------------------------- */ /* ! COMPONENT: Volume Time Series Visualizer LEFT COLUMN */ /* !------------------------------------------------------- */ From 988ebf435484e5ff2ef79f1d5547159dd0da8402 Mon Sep 17 00:00:00 2001 From: kimonoki Date: Thu, 7 Jun 2018 18:39:19 +1000 Subject: [PATCH 16/53] TVB-2370 Fix all the stroke boundary styling in the brush Trying to use d3v5 --- tvb/interfaces/web/static/js/d3.v5.min.js | 2 ++ tvb/interfaces/web/static/style/subsection_timeseries.css | 6 +++--- .../visualizers/new_dual_brain/dual_brain_2d_view.html | 3 +-- 3 files changed, 6 insertions(+), 5 deletions(-) create mode 100644 tvb/interfaces/web/static/js/d3.v5.min.js diff --git a/tvb/interfaces/web/static/js/d3.v5.min.js b/tvb/interfaces/web/static/js/d3.v5.min.js new file mode 100644 index 000000000..d715d22e0 --- /dev/null +++ b/tvb/interfaces/web/static/js/d3.v5.min.js @@ -0,0 +1,2 @@ +// https://d3js.org Version 5.4.0. Copyright 2018 Mike Bostock. 
+(function(t,n){"object"==typeof exports&&"undefined"!=typeof module?n(exports):"function"==typeof define&&define.amd?define(["exports"],n):n(t.d3=t.d3||{})})(this,function(t){"use strict";function n(t,n){return tn?1:t>=n?0:NaN}function e(t){return 1===t.length&&(t=function(t){return function(e,r){return n(t(e),r)}}(t)),{left:function(n,e,r,i){for(null==r&&(r=0),null==i&&(i=n.length);r>>1;t(n[o],e)<0?r=o+1:i=o}return r},right:function(n,e,r,i){for(null==r&&(r=0),null==i&&(i=n.length);r>>1;t(n[o],e)>0?i=o:r=o+1}return r}}}function r(t,n){return[t,n]}function i(t){return null===t?NaN:+t}function o(t,n){var e,r,o=t.length,a=0,u=-1,f=0,c=0;if(null==n)for(;++u1)return c/(a-1)}function a(t,n){var e=o(t,n);return e?Math.sqrt(e):e}function u(t,n){var e,r,i,o=t.length,a=-1;if(null==n){for(;++a=e)for(r=i=e;++ae&&(r=e),i=e)for(r=i=e;++ae&&(r=e),i0)return[t];if((r=n0)for(t=Math.ceil(t/a),n=Math.floor(n/a),o=new Array(i=Math.ceil(n-t+1));++u=0?(o>=rs?10:o>=is?5:o>=os?2:1)*Math.pow(10,i):-Math.pow(10,-i)/(o>=rs?10:o>=is?5:o>=os?2:1)}function d(t,n,e){var r=Math.abs(n-t)/Math.max(0,e),i=Math.pow(10,Math.floor(Math.log(r)/Math.LN10)),o=r/i;return o>=rs?i*=10:o>=is?i*=5:o>=os&&(i*=2),n=1)return+e(t[r-1],r-1,t);var r,o=(r-1)*n,a=Math.floor(o),u=+e(t[a],a,t);return u+(+e(t[a+1],a+1,t)-u)*(o-a)}}function g(t,n){var e,r,i=t.length,o=-1;if(null==n){for(;++o=e)for(r=e;++or&&(r=e)}else for(;++o=e)for(r=e;++or&&(r=e);return r}function y(t){for(var n,e,r,i=t.length,o=-1,a=0;++o=0;)for(n=(r=t[i]).length;--n>=0;)e[--a]=r[n];return e}function _(t,n){var e,r,i=t.length,o=-1;if(null==n){for(;++o=e)for(r=e;++oe&&(r=e)}else for(;++o=e)for(r=e;++oe&&(r=e);return r}function b(t){if(!(i=t.length))return[];for(var n=-1,e=_(t,m),r=new Array(e);++n=0&&"xmlns"!==(n=t.slice(0,e))&&(t=t.slice(e+1)),ps.hasOwnProperty(n)?{space:ps[n],local:t}:t}function C(t){var n=k(t);return(n.local?function(t){return function(){return this.ownerDocument.createElementNS(t.space,t.local)}}:function(t){return function(){var 
n=this.ownerDocument,e=this.namespaceURI;return e===ds&&n.documentElement.namespaceURI===ds?n.createElement(t):n.createElementNS(e,t)}})(n)}function P(){}function z(t){return null==t?P:function(){return this.querySelector(t)}}function R(){return[]}function L(t){return null==t?R:function(){return this.querySelectorAll(t)}}function D(t){return new Array(t.length)}function U(t,n){this.ownerDocument=t.ownerDocument,this.namespaceURI=t.namespaceURI,this._next=null,this._parent=t,this.__data__=n}function q(t,n,e,r,i,o){for(var a,u=0,f=n.length,c=o.length;un?1:t>=n?0:NaN}function B(t){return t.ownerDocument&&t.ownerDocument.defaultView||t.document&&t||t.defaultView}function F(t,n){return t.style.getPropertyValue(n)||B(t).getComputedStyle(t,null).getPropertyValue(n)}function I(t){return t.trim().split(/^|\s+/)}function j(t){return t.classList||new H(t)}function H(t){this._node=t,this._names=I(t.getAttribute("class")||"")}function X(t,n){for(var e=j(t),r=-1,i=n.length;++r>8&15|n>>4&240,n>>4&15|240&n,(15&n)<<4|15&n,1)):(n=Ss.exec(t))?Ct(parseInt(n[1],16)):(n=Es.exec(t))?new Lt(n[1],n[2],n[3],1):(n=ks.exec(t))?new Lt(255*n[1]/100,255*n[2]/100,255*n[3]/100,1):(n=Cs.exec(t))?Pt(n[1],n[2],n[3],n[4]):(n=Ps.exec(t))?Pt(255*n[1]/100,255*n[2]/100,255*n[3]/100,n[4]):(n=zs.exec(t))?Ut(n[1],n[2]/100,n[3]/100,1):(n=Rs.exec(t))?Ut(n[1],n[2]/100,n[3]/100,n[4]):Ls.hasOwnProperty(t)?Ct(Ls[t]):"transparent"===t?new Lt(NaN,NaN,NaN,0):null}function Ct(t){return new Lt(t>>16&255,t>>8&255,255&t,1)}function Pt(t,n,e,r){return r<=0&&(t=n=e=NaN),new Lt(t,n,e,r)}function zt(t){return t instanceof Et||(t=kt(t)),t?(t=t.rgb(),new Lt(t.r,t.g,t.b,t.opacity)):new Lt}function Rt(t,n,e,r){return 1===arguments.length?zt(t):new Lt(t,n,e,null==r?1:r)}function Lt(t,n,e,r){this.r=+t,this.g=+n,this.b=+e,this.opacity=+r}function Dt(t){return((t=Math.max(0,Math.min(255,Math.round(t)||0)))<16?"0":"")+t.toString(16)}function Ut(t,n,e,r){return r<=0?t=n=e=NaN:e<=0||e>=1?t=n=NaN:n<=0&&(t=NaN),new Ot(t,n,e,r)}function 
qt(t,n,e,r){return 1===arguments.length?function(t){if(t instanceof Ot)return new Ot(t.h,t.s,t.l,t.opacity);if(t instanceof Et||(t=kt(t)),!t)return new Ot;if(t instanceof Ot)return t;var n=(t=t.rgb()).r/255,e=t.g/255,r=t.b/255,i=Math.min(n,e,r),o=Math.max(n,e,r),a=NaN,u=o-i,f=(o+i)/2;return u?(a=n===o?(e-r)/u+6*(e0&&f<1?0:a,new Ot(a,u,f,t.opacity)}(t):new Ot(t,n,e,null==r?1:r)}function Ot(t,n,e,r){this.h=+t,this.s=+n,this.l=+e,this.opacity=+r}function Yt(t,n,e){return 255*(t<60?n+(e-n)*t/60:t<180?e:t<240?n+(e-n)*(240-t)/60:n)}function Bt(t){if(t instanceof It)return new It(t.l,t.a,t.b,t.opacity);if(t instanceof Wt){if(isNaN(t.h))return new It(t.l,0,0,t.opacity);var n=t.h*Ds;return new It(t.l,Math.cos(n)*t.c,Math.sin(n)*t.c,t.opacity)}t instanceof Lt||(t=zt(t));var e,r,i=Gt(t.r),o=Gt(t.g),a=Gt(t.b),u=jt((.2225045*i+.7168786*o+.0606169*a)/Os);return i===o&&o===a?e=r=u:(e=jt((.4360747*i+.3850649*o+.1430804*a)/qs),r=jt((.0139322*i+.0971045*o+.7141733*a)/Ys)),new It(116*u-16,500*(e-u),200*(u-r),t.opacity)}function Ft(t,n,e,r){return 1===arguments.length?Bt(t):new It(t,n,e,null==r?1:r)}function It(t,n,e,r){this.l=+t,this.a=+n,this.b=+e,this.opacity=+r}function jt(t){return t>js?Math.pow(t,1/3):t/Is+Bs}function Ht(t){return t>Fs?t*t*t:Is*(t-Bs)}function Xt(t){return 255*(t<=.0031308?12.92*t:1.055*Math.pow(t,1/2.4)-.055)}function Gt(t){return(t/=255)<=.04045?t/12.92:Math.pow((t+.055)/1.055,2.4)}function Vt(t){if(t instanceof Wt)return new Wt(t.h,t.c,t.l,t.opacity);if(t instanceof It||(t=Bt(t)),0===t.a&&0===t.b)return new Wt(NaN,0,t.l,t.opacity);var n=Math.atan2(t.b,t.a)*Us;return new Wt(n<0?n+360:n,Math.sqrt(t.a*t.a+t.b*t.b),t.l,t.opacity)}function $t(t,n,e,r){return 1===arguments.length?Vt(t):new Wt(t,n,e,null==r?1:r)}function Wt(t,n,e,r){this.h=+t,this.c=+n,this.l=+e,this.opacity=+r}function Zt(t,n,e,r){return 1===arguments.length?function(t){if(t instanceof Qt)return new Qt(t.h,t.s,t.l,t.opacity);t instanceof Lt||(t=zt(t));var 
n=t.r/255,e=t.g/255,r=t.b/255,i=(Ws*r+Vs*n-$s*e)/(Ws+Vs-$s),o=r-i,a=(Gs*(e-i)-Hs*o)/Xs,u=Math.sqrt(a*a+o*o)/(Gs*i*(1-i)),f=u?Math.atan2(a,o)*Us-120:NaN;return new Qt(f<0?f+360:f,u,i,t.opacity)}(t):new Qt(t,n,e,null==r?1:r)}function Qt(t,n,e,r){this.h=+t,this.s=+n,this.l=+e,this.opacity=+r}function Jt(t,n,e,r,i){var o=t*t,a=o*t;return((1-3*t+3*o-a)*n+(4-6*o+3*a)*e+(1+3*t+3*o-3*a)*r+a*i)/6}function Kt(t){var n=t.length-1;return function(e){var r=e<=0?e=0:e>=1?(e=1,n-1):Math.floor(e*n),i=t[r],o=t[r+1],a=r>0?t[r-1]:2*i-o,u=r180||e<-180?e-360*Math.round(e/360):e):nn(isNaN(t)?n:t)}function on(t){return 1==(t=+t)?an:function(n,e){return e-n?function(t,n,e){return t=Math.pow(t,e),n=Math.pow(n,e)-t,e=1/e,function(r){return Math.pow(t+r*n,e)}}(n,e,t):nn(isNaN(n)?e:n)}}function an(t,n){var e=n-t;return e?en(t,e):nn(isNaN(t)?n:t)}function un(t){return function(n){var e,r,i=n.length,o=new Array(i),a=new Array(i),u=new Array(i);for(e=0;eo&&(i=n.slice(o,i),u[a]?u[a]+=i:u[++a]=i),(e=e[0])===(r=r[0])?u[a]?u[a]+=r:u[++a]=r:(u[++a]=null,f.push({i:a,x:sn(e,r)})),o=al.lastIndex;return o180?n+=360:n-t>180&&(t+=360),o.push({i:e.push(i(e)+"rotate(",null,r)-2,x:sn(t,n)})):n&&e.push(i(e)+"rotate("+n+r)}(o.rotate,a.rotate,u,f),function(t,n,e,o){t!==n?o.push({i:e.push(i(e)+"skewX(",null,r)-2,x:sn(t,n)}):n&&e.push(i(e)+"skewX("+n+r)}(o.skewX,a.skewX,u,f),function(t,n,e,r,o,a){if(t!==e||n!==r){var u=o.push(i(o)+"scale(",null,",",null,")");a.push({i:u-4,x:sn(t,e)},{i:u-2,x:sn(n,r)})}else 1===e&&1===r||o.push(i(o)+"scale("+e+","+r+")")}(o.scaleX,o.scaleY,a.scaleX,a.scaleY,u,f),o=a=null,function(t){for(var n,e=-1,r=f.length;++e=0&&n._call.call(null,t),n=n._next;--xl}function Sn(){Nl=(Tl=El.now())+Sl,xl=wl=0;try{Nn()}finally{xl=0,function(){var t,n,e=tl,r=1/0;for(;e;)e._call?(r>e._time&&(r=e._time),t=e,e=e._next):(n=e._next,e._next=null,e=t?t._next=n:tl=n);nl=t,kn(r)}(),Nl=0}}function En(){var t=El.now(),n=t-Tl;n>Al&&(Sl-=n,Tl=t)}function 
kn(t){if(!xl){wl&&(wl=clearTimeout(wl));t-Nl>24?(t<1/0&&(wl=setTimeout(Sn,t-El.now()-Sl)),Ml&&(Ml=clearInterval(Ml))):(Ml||(Tl=El.now(),Ml=setInterval(En,Al)),xl=1,kl(Sn))}}function Cn(t,n,e){var r=new An;return n=null==n?0:+n,r.restart(function(e){r.stop(),t(e+n)},n,e),r}function Pn(t,n,e,r,i,o){var a=t.__transition;if(a){if(e in a)return}else t.__transition={};(function(t,n,e){function r(f){var c,s,l,h;if(e.state!==Rl)return o();for(c in u)if((h=u[c]).name===e.name){if(h.state===Dl)return Cn(r);h.state===Ul?(h.state=Ol,h.timer.stop(),h.on.call("interrupt",t,t.__data__,h.index,h.group),delete u[c]):+czl)throw new Error("too late; already scheduled");return e}function Rn(t,n){var e=Ln(t,n);if(e.state>Ll)throw new Error("too late; already started");return e}function Ln(t,n){var e=t.__transition;if(!e||!(e=e[n]))throw new Error("transition not found");return e}function Dn(t,n){var e,r,i,o=t.__transition,a=!0;if(o){n=null==n?null:n+"";for(i in o)(e=o[i]).name===n?(r=e.state>Ll&&e.stateMath.abs(t[1]-U[1])?x=!0:m=!0),U=t,b=!0,Zn(),o()}function o(){var t;switch(y=U[0]-D[0],_=U[1]-D[1],A){case dh:case hh:T&&(y=Math.max(C-u,Math.min(z-d,y)),c=u+y,p=d+y),N&&(_=Math.max(P-l,Math.min(R-v,_)),h=l+_,g=v+_);break;case ph:T<0?(y=Math.max(C-u,Math.min(z-u,y)),c=u+y,p=d):T>0&&(y=Math.max(C-d,Math.min(z-d,y)),c=u,p=d+y),N<0?(_=Math.max(P-l,Math.min(R-l,_)),h=l+_,g=v):N>0&&(_=Math.max(P-v,Math.min(R-v,_)),h=l,g=v+_);break;case vh:T&&(c=Math.max(C,Math.min(z,u-y*T)),p=Math.max(C,Math.min(z,d+y*T))),N&&(h=Math.max(P,Math.min(R,l-_*N)),g=Math.max(P,Math.min(R,v+_*N)))}p0&&(u=c-y),N<0?v=g-_:N>0&&(l=h-_),A=dh,Y.attr("cursor",bh.selection),o());break;default:return}Zn()},!0).on("keyup.brush",function(){switch(t.event.keyCode){case 16:L&&(m=x=L=!1,o());break;case 18:A===vh&&(T<0?d=p:T>0&&(u=c),N<0?v=g:N>0&&(l=h),A=ph,o());break;case 
32:A===dh&&(t.event.altKey?(T&&(d=p-y*T,u=c+y*T),N&&(v=g-_*N,l=h+_*N),A=vh):(T<0?d=p:T>0&&(u=c),N<0?v=g:N>0&&(l=h),A=ph),Y.attr("cursor",bh[M]),o());break;default:return}Zn()},!0).on("mousemove.brush",e,!0).on("mouseup.brush",a,!0);_t(t.event.view)}Wn(),Dn(w),r.call(w),q.start()}}function u(){var t=this.__brush||{selection:null};return t.extent=c.apply(this,arguments),t.dim=n,t}var f,c=Kn,s=Jn,l=N(e,"start","brush","end"),h=6;return e.move=function(t,e){t.selection?t.on("start.brush",function(){i(this,arguments).beforestart().start()}).on("interrupt.brush end.brush",function(){i(this,arguments).end()}).tween("brush",function(){function t(t){a.selection=1===t&&ne(c)?null:s(t),r.call(o),u.brush()}var o=this,a=o.__brush,u=i(o,arguments),f=a.selection,c=n.input("function"==typeof e?e.apply(this,arguments):e,a.extent),s=dn(f,c);return f&&c?t:t(1)}):t.each(function(){var t=arguments,o=this.__brush,a=n.input("function"==typeof e?e.apply(this,t):e,o.extent),u=i(this,t).beforestart();Dn(this),o.selection=null==a||ne(a)?null:a,r.call(this),u.start().brush().end()})},o.prototype={beforestart:function(){return 1==++this.active&&(this.state.emitter=this,this.starting=!0),this},start:function(){return this.starting&&(this.starting=!1,this.emit("start")),this},brush:function(){return this.emit("brush"),this},end:function(){return 0==--this.active&&(delete this.state.emitter,this.emit("end")),this},emit:function(t){ot(new function(t,n,e){this.target=t,this.type=n,this.selection=e}(e,t,n.output(this.state.selection)),l.apply,l,[t,this.that,this.args])}},e.extent=function(t){return arguments.length?(c="function"==typeof t?t:$n([[+t[0][0],+t[0][1]],[+t[1][0],+t[1][1]]]),e):c},e.filter=function(t){return arguments.length?(s="function"==typeof t?t:$n(!!t),e):s},e.handleSize=function(t){return arguments.length?(h=+t,e):h},e.on=function(){var t=l.on.apply(l,arguments);return t===l?e:t},e}function re(t){return function(){return t}}function 
ie(){this._x0=this._y0=this._x1=this._y1=null,this._=""}function oe(){return new ie}function ae(t){return t.source}function ue(t){return t.target}function fe(t){return t.radius}function ce(t){return t.startAngle}function se(t){return t.endAngle}function le(){}function he(t,n){var e=new le;if(t instanceof le)t.each(function(t,n){e.set(n,t)});else if(Array.isArray(t)){var r,i=-1,o=t.length;if(null==n)for(;++ir!=d>r&&e<(h-c)*(r-s)/(d-s)+c&&(i=-i)}return i}(t,n[r]))return e;return 0}function we(){}function Me(){function t(t){var e=a(t);if(Array.isArray(e))e=e.slice().sort(be);else{var r=u(t),i=r[0],o=r[1];e=d(i,o,e),e=s(Math.floor(i/e)*e,Math.floor(o/e)*e,e)}return e.map(function(e){return n(t,e)})}function n(t,n){var r=[],a=[];return function(t,n,r){function a(t){var n,i,o=[t[0][0]+u,t[0][1]+f],a=[t[1][0]+u,t[1][1]+f],c=e(o),s=e(a);(n=p[c])?(i=d[s])?(delete p[n.end],delete d[i.start],n===i?(n.ring.push(a),r(n.ring)):d[n.start]=p[i.end]={start:n.start,end:i.end,ring:n.ring.concat(i.ring)}):(delete p[n.end],n.ring.push(a),p[n.end=s]=n):(n=d[s])?(i=p[c])?(delete d[n.start],delete p[i.end],n===i?(n.ring.push(a),r(n.ring)):d[i.start]=p[n.end]={start:i.start,end:n.end,ring:i.ring.concat(n.ring)}):(delete d[n.start],n.ring.unshift(o),d[n.start=c]=n):d[c]=p[s]={start:c,end:s,ring:[o,a]}}var u,f,c,s,l,h,d=new Array,p=new Array;u=f=-1,s=t[0]>=n,Uh[s<<1].forEach(a);for(;++u=n,Uh[c|s<<1].forEach(a);Uh[s<<0].forEach(a);for(;++f=n,l=t[f*i]>=n,Uh[s<<1|l<<2].forEach(a);++u=n,h=l,l=t[f*i+u+1]>=n,Uh[c|s<<1|l<<2|h<<3].forEach(a);Uh[s|l<<3].forEach(a)}u=-1,l=t[f*i]>=n,Uh[l<<2].forEach(a);for(;++u=n,Uh[l<<2|h<<3].forEach(a);Uh[l<<3].forEach(a)}(t,n,function(e){f(e,t,n),function(t){for(var n=0,e=t.length,r=t[e-1][1]*t[0][0]-t[e-1][0]*t[0][1];++n0?r.push([e]):a.push(e)}),a.forEach(function(t){for(var n,e=0,i=r.length;e0&&a0&&u0&&r>0))throw new Error("invalid size");return i=e,o=r,t},t.thresholds=function(n){return arguments.length?(a="function"==typeof 
n?n:Array.isArray(n)?me(Dh.call(n)):me(n),t):a},t.smooth=function(n){return arguments.length?(f=n?r:we,t):f===r},t}function Ae(t,n,e){for(var r=t.width,i=t.height,o=1+(e<<1),a=0;a=e&&(u>=o&&(f-=t.data[u-o+a*r]),n.data[u-e+a*r]=f/Math.min(u+1,r-1+o-u,o))}function Te(t,n,e){for(var r=t.width,i=t.height,o=1+(e<<1),a=0;a=e&&(u>=o&&(f-=t.data[a+(u-o)*r]),n.data[a+(u-e)*r]=f/Math.min(u+1,i-1+o-u,o))}function Ne(t){return t[0]}function Se(t){return t[1]}function Ee(t){return new Function("d","return {"+t.map(function(t,n){return JSON.stringify(t)+": d["+n+"]"}).join(",")+"}")}function ke(t){function n(t,n){function e(){if(c)return Oh;if(s)return s=!1,qh;var n,e,r=u;if(t.charCodeAt(r)===Yh){for(;u++=a?c=!0:(e=t.charCodeAt(u++))===Bh?s=!0:e===Fh&&(s=!0,t.charCodeAt(u)===Bh&&++u),t.slice(r+1,n-1).replace(/""/g,'"')}for(;u=(o=(v+y)/2))?v=o:y=o,(s=e>=(a=(g+_)/2))?g=a:_=a,i=d,!(d=d[l=s<<1|c]))return i[l]=p,t;if(u=+t._x.call(null,d.data),f=+t._y.call(null,d.data),n===u&&e===f)return p.next=d,i?i[l]=p:t._root=p,t;do{i=i?i[l]=new Array(4):t._root=new Array(4),(c=n>=(o=(v+y)/2))?v=o:y=o,(s=e>=(a=(g+_)/2))?g=a:_=a}while((l=s<<1|c)==(h=(f>=a)<<1|u>=o));return i[h]=d,i[l]=p,t}function Be(t,n,e,r,i){this.node=t,this.x0=n,this.y0=e,this.x1=r,this.y1=i}function Fe(t){return t[0]}function Ie(t){return t[1]}function je(t,n,e){var r=new He(null==n?Fe:n,null==e?Ie:e,NaN,NaN,NaN,NaN);return null==t?r:r.addAll(t)}function He(t,n,e,r,i,o){this._x=t,this._y=n,this._x0=e,this._y0=r,this._x1=i,this._y1=o,this._root=void 0}function Xe(t){for(var n={data:t.data},e=n;t=t.next;)e=e.next={data:t.data};return n}function Ge(t){return t.x+t.vx}function Ve(t){return t.y+t.vy}function $e(t){return t.index}function We(t,n){var e=t.get(n);if(!e)throw new Error("missing: "+n);return e}function Ze(t){return t.x}function Qe(t){return t.y}function Je(t,n){if((e=(t=n?t.toExponential(n-1):t.toExponential()).indexOf("e"))<0)return null;var e,r=t.slice(0,e);return[r.length>1?r[0]+r.slice(2):r,+t.slice(e+1)]}function 
Ke(t){return(t=Je(Math.abs(t)))?t[1]:NaN}function tr(t){return new nr(t)}function nr(t){if(!(n=ad.exec(t)))throw new Error("invalid format: "+t);var n;this.fill=n[1]||" ",this.align=n[2]||">",this.sign=n[3]||"-",this.symbol=n[4]||"",this.zero=!!n[5],this.width=n[6]&&+n[6],this.comma=!!n[7],this.precision=n[8]&&+n[8].slice(1),this.trim=!!n[9],this.type=n[10]||""}function er(t,n){var e=Je(t,n);if(!e)return t+"";var r=e[0],i=e[1];return i<0?"0."+new Array(-i).join("0")+r:r.length>i+1?r.slice(0,i+1)+"."+r.slice(i+1):r+new Array(i-r.length+2).join("0")}function rr(t){return t}function ir(t){function n(t){function n(t){var n,r,a,s=y,x=_;if("c"===g)x=b(t)+x,t="";else{var w=(t=+t)<0;if(t=b(Math.abs(t),p),v&&(t=function(t){t:for(var n,e=t.length,r=1,i=-1;r0){if(!+t[r])break t;i=0}}return i>0?t.slice(0,i)+t.slice(n+1):t}(t)),w&&0==+t&&(w=!1),s=(w?"("===c?c:"-":"-"===c||"("===c?"":c)+s,x=("s"===g?sd[8+ud/3]:"")+x+(w&&"("===c?")":""),m)for(n=-1,r=t.length;++n(a=t.charCodeAt(n))||a>57){x=(46===a?i+t.slice(n+1):t.slice(n))+x,t=t.slice(0,n);break}}d&&!l&&(t=e(t,1/0));var M=s.length+t.length+x.length,A=M>1)+s+t+x+A.slice(M);break;default:t=A+s+t+x}return o(t)}var u=(t=tr(t)).fill,f=t.align,c=t.sign,s=t.symbol,l=t.zero,h=t.width,d=t.comma,p=t.precision,v=t.trim,g=t.type;"n"===g?(d=!0,g="g"):cd[g]||(null==p&&(p=12),v=!0,g="g"),(l||"0"===u&&"="===f)&&(l=!0,u="0",f="=");var y="$"===s?r[0]:"#"===s&&/[boxX]/.test(g)?"0"+g.toLowerCase():"",_="$"===s?r[1]:/[%p]/.test(g)?a:"",b=cd[g],m=/[defgprs%]/.test(g);return p=null==p?6:/[gprs]/.test(g)?Math.max(1,Math.min(21,p)):Math.max(0,Math.min(20,p)),n.toString=function(){return t+""},n}var e=t.grouping&&t.thousands?function(t,n){return function(e,r){for(var i=e.length,o=[],a=0,u=t[0],f=0;i>0&&u>0&&(f+u+1>r&&(u=Math.max(1,r-f)),o.push(e.substring(i-=u,i+u)),!((f+=u+1)>r));)u=t[a=(a+1)%t.length];return o.reverse().join(n)}}(t.grouping,t.thousands):rr,r=t.currency,i=t.decimal,o=t.numerals?function(t){return function(n){return 
n.replace(/[0-9]/g,function(n){return t[+n]})}}(t.numerals):rr,a=t.percent||"%";return{format:n,formatPrefix:function(t,e){var r=n((t=tr(t),t.type="f",t)),i=3*Math.max(-8,Math.min(8,Math.floor(Ke(e)/3))),o=Math.pow(10,-i),a=sd[8+i/3];return function(t){return r(o*t)+a}}}}function or(n){return fd=ir(n),t.format=fd.format,t.formatPrefix=fd.formatPrefix,fd}function ar(t){return Math.max(0,-Ke(Math.abs(t)))}function ur(t,n){return Math.max(0,3*Math.max(-8,Math.min(8,Math.floor(Ke(n)/3)))-Ke(Math.abs(t)))}function fr(t,n){return t=Math.abs(t),n=Math.abs(n)-t,Math.max(0,Ke(n)-Ke(t))+1}function cr(){return new sr}function sr(){this.reset()}function lr(t,n,e){var r=t.s=n+e,i=r-n,o=r-i;t.t=n-o+(e-i)}function hr(t){return t>1?0:t<-1?Xd:Math.acos(t)}function dr(t){return t>1?Gd:t<-1?-Gd:Math.asin(t)}function pr(t){return(t=op(t/2))*t}function vr(){}function gr(t,n){t&&sp.hasOwnProperty(t.type)&&sp[t.type](t,n)}function yr(t,n,e){var r,i=-1,o=t.length-e;for(n.lineStart();++i=0?1:-1,i=r*e,o=tp(n),a=op(n),u=vd*a,f=pd*o+u*tp(i),c=u*r*op(i);lp.add(Kd(c,f)),dd=t,pd=o,vd=a}function Ar(t){return[Kd(t[1],t[0]),dr(t[2])]}function Tr(t){var n=t[0],e=t[1],r=tp(e);return[r*tp(n),r*op(n),op(e)]}function Nr(t,n){return t[0]*n[0]+t[1]*n[1]+t[2]*n[2]}function Sr(t,n){return[t[1]*n[2]-t[2]*n[1],t[2]*n[0]-t[0]*n[2],t[0]*n[1]-t[1]*n[0]]}function Er(t,n){t[0]+=n[0],t[1]+=n[1],t[2]+=n[2]}function kr(t,n){return[t[0]*n,t[1]*n,t[2]*n]}function Cr(t){var n=up(t[0]*t[0]+t[1]*t[1]+t[2]*t[2]);t[0]/=n,t[1]/=n,t[2]/=n}function Pr(t,n){Ad.push(Td=[gd=t,_d=t]),nbd&&(bd=n)}function zr(t,n){var e=Tr([t*Zd,n*Zd]);if(Md){var r=Sr(Md,e),i=Sr([r[1],-r[0],0],r);Cr(i),i=Ar(i);var o,a=t-md,u=a>0?1:-1,f=i[0]*Wd*u,c=Qd(a)>180;c^(u*mdbd&&(bd=o):(f=(f+360)%360-180,c^(u*mdbd&&(bd=n))),c?tOr(gd,_d)&&(_d=t):Or(t,_d)>Or(gd,_d)&&(gd=t):_d>=gd?(t_d&&(_d=t)):t>md?Or(gd,t)>Or(gd,_d)&&(_d=t):Or(t,_d)>Or(gd,_d)&&(gd=t)}else Ad.push(Td=[gd=t,_d=t]);nbd&&(bd=n),Md=e,md=t}function Rr(){vp.point=zr}function 
Lr(){Td[0]=gd,Td[1]=_d,vp.point=Pr,Md=null}function Dr(t,n){if(Md){var e=t-md;pp.add(Qd(e)>180?e+(e>0?360:-360):e)}else xd=t,wd=n;dp.point(t,n),zr(t,n)}function Ur(){dp.lineStart()}function qr(){Dr(xd,wd),dp.lineEnd(),Qd(pp)>jd&&(gd=-(_d=180)),Td[0]=gd,Td[1]=_d,Md=null}function Or(t,n){return(n-=t)<0?n+360:n}function Yr(t,n){return t[0]-n[0]}function Br(t,n){return t[0]<=t[1]?t[0]<=n&&n<=t[1]:nXd?t-$d:t<-Xd?t+$d:t,n]}function ti(t,n,e){return(t%=$d)?n||e?Jr(ei(t),ri(n,e)):ei(t):n||e?ri(n,e):Kr}function ni(t){return function(n,e){return n+=t,[n>Xd?n-$d:n<-Xd?n+$d:n,e]}}function ei(t){var n=ni(t);return n.invert=ni(-t),n}function ri(t,n){function e(t,n){var e=tp(n),u=tp(t)*e,f=op(t)*e,c=op(n),s=c*r+u*i;return[Kd(f*o-s*a,u*r-c*i),dr(s*o+f*a)]}var r=tp(t),i=op(t),o=tp(n),a=op(n);return e.invert=function(t,n){var e=tp(n),u=tp(t)*e,f=op(t)*e,c=op(n),s=c*o-f*a;return[Kd(f*o+c*a,u*r+s*i),dr(s*r-u*i)]},e}function ii(t){function n(n){return n=t(n[0]*Zd,n[1]*Zd),n[0]*=Wd,n[1]*=Wd,n}return t=ti(t[0]*Zd,t[1]*Zd,t.length>2?t[2]*Zd:0),n.invert=function(n){return n=t.invert(n[0]*Zd,n[1]*Zd),n[0]*=Wd,n[1]*=Wd,n},n}function oi(t,n,e,r,i,o){if(e){var a=tp(n),u=op(n),f=r*e;null==i?(i=n+r*$d,o=n-f/2):(i=ai(a,i),o=ai(a,o),(r>0?io)&&(i+=r*$d));for(var c,s=i;r>0?s>o:s1&&n.push(n.pop().concat(n.shift()))},result:function(){var e=n;return n=[],t=null,e}}}function fi(t,n){return Qd(t[0]-n[0])=0;--o)i.point((s=c[o])[0],s[1]);else r(h.x,h.p.x,-1,i);h=h.p}c=(h=h.o).z,d=!d}while(!h.v);i.lineEnd()}}}function li(t){if(n=t.length){for(var n,e,r=0,i=t[0];++r=0?1:-1,T=A*M,N=T>Xd,S=v*x;if(Ep.add(Kd(S*A*op(T),g*w+S*tp(T))),a+=N?M+A*$d:M,N^d>=e^b>=e){var E=Sr(Tr(h),Tr(_));Cr(E);var k=Sr(o,E);Cr(k);var C=(N^M>=0?-1:1)*dr(k[2]);(r>C||r===C&&(E[0]||E[1]))&&(u+=N^M>=0?1:-1)}}return(a<-jd||a0){for(b||(i.polygonStart(),b=!0),i.lineStart(),t=0;t1&&2&o&&a.push(a.pop().concat(a.shift())),d.push(a.filter(pi))}var 
h,d,p,v=n(i),g=ui(),_=n(g),b=!1,m={point:o,lineStart:u,lineEnd:f,polygonStart:function(){m.point=c,m.lineStart=s,m.lineEnd=l,d=[],h=[]},polygonEnd:function(){m.point=o,m.lineStart=u,m.lineEnd=f,d=y(d);var t=hi(h,r);d.length?(b||(i.polygonStart(),b=!0),si(d,vi,t,e,i)):t&&(b||(i.polygonStart(),b=!0),i.lineStart(),e(null,null,1,i),i.lineEnd()),b&&(i.polygonEnd(),b=!1),d=h=null},sphere:function(){i.polygonStart(),i.lineStart(),e(null,null,1,i),i.lineEnd(),i.polygonEnd()}};return m}}function pi(t){return t.length>1}function vi(t,n){return((t=t.x)[0]<0?t[1]-Gd-jd:Gd-t[1])-((n=n.x)[0]<0?n[1]-Gd-jd:Gd-n[1])}function gi(t){function n(t,n){return tp(t)*tp(n)>i}function e(t,n,e){var r=[1,0,0],o=Sr(Tr(t),Tr(n)),a=Nr(o,o),u=o[0],f=a-u*u;if(!f)return!e&&t;var c=i*a/f,s=-i*u/f,l=Sr(r,o),h=kr(r,c);Er(h,kr(o,s));var d=l,p=Nr(h,d),v=Nr(d,d),g=p*p-v*(Nr(h,h)-1);if(!(g<0)){var y=up(g),_=kr(d,(-p-y)/v);if(Er(_,h),_=Ar(_),!e)return _;var b,m=t[0],x=n[0],w=t[1],M=n[1];x0^_[1]<(Qd(_[0]-m)Xd^(m<=_[0]&&_[0]<=x)){var N=kr(d,(-p+y)/v);return Er(N,h),[_,Ar(N)]}}}function r(n,e){var r=a?t:Xd-t,i=0;return n<-r?i|=1:n>r&&(i|=2),e<-r?i|=4:e>r&&(i|=8),i}var i=tp(t),o=6*Zd,a=i>0,u=Qd(i)>jd;return di(n,function(t){var i,o,f,c,s;return{lineStart:function(){c=f=!1,s=1},point:function(l,h){var d,p=[l,h],v=n(l,h),g=a?v?0:r(l,h):v?r(l+(l<0?Xd:-Xd),h):0;if(!i&&(c=f=v)&&t.lineStart(),v!==f&&(!(d=e(i,p))||fi(i,d)||fi(p,d))&&(p[0]+=jd,p[1]+=jd,v=n(p[0],p[1])),v!==f)s=0,v?(t.lineStart(),d=e(p,i),t.point(d[0],d[1])):(d=e(i,p),t.point(d[0],d[1]),t.lineEnd()),i=d;else if(u&&i&&a^v){var y;g&o||!(y=e(p,i,!0))||(s=0,a?(t.lineStart(),t.point(y[0][0],y[0][1]),t.point(y[1][0],y[1][1]),t.lineEnd()):(t.point(y[1][0],y[1][1]),t.lineEnd(),t.lineStart(),t.point(y[0][0],y[0][1])))}!v||i&&fi(i,p)||t.point(p[0],p[1]),i=p,f=v,o=g},lineEnd:function(){f&&t.lineEnd(),i=null},clean:function(){return s|(c&&f)<<1}}},function(n,e,r,i){oi(i,t,o,r,n,e)},a?[0,-t]:[-Xd,t-Xd])}function yi(t,n,e,r){function i(i,o){return 
t<=i&&i<=e&&n<=o&&o<=r}function o(i,o,u,c){var s=0,l=0;if(null==i||(s=a(i,u))!==(l=a(o,u))||f(i,o)<0^u>0)do{c.point(0===s||3===s?t:e,s>1?r:n)}while((s=(s+u+4)%4)!==l);else c.point(o[0],o[1])}function a(r,i){return Qd(r[0]-t)0?0:3:Qd(r[0]-e)0?2:1:Qd(r[1]-n)0?1:0:i>0?3:2}function u(t,n){return f(t.x,n.x)}function f(t,n){var e=a(t,1),r=a(n,1);return e!==r?e-r:0===e?n[1]-t[1]:1===e?t[0]-n[0]:2===e?t[1]-n[1]:n[0]-t[0]}return function(a){function f(t,n){i(t,n)&&w.point(t,n)}function c(o,a){var u=i(o,a);if(l&&h.push([o,a]),m)d=o,p=a,v=u,m=!1,u&&(w.lineStart(),w.point(o,a));else if(u&&b)w.point(o,a);else{var f=[g=Math.max(Pp,Math.min(Cp,g)),_=Math.max(Pp,Math.min(Cp,_))],c=[o=Math.max(Pp,Math.min(Cp,o)),a=Math.max(Pp,Math.min(Cp,a))];!function(t,n,e,r,i,o){var a,u=t[0],f=t[1],c=0,s=1,l=n[0]-u,h=n[1]-f;if(a=e-u,l||!(a>0)){if(a/=l,l<0){if(a0){if(a>s)return;a>c&&(c=a)}if(a=i-u,l||!(a<0)){if(a/=l,l<0){if(a>s)return;a>c&&(c=a)}else if(l>0){if(a0)){if(a/=h,h<0){if(a0){if(a>s)return;a>c&&(c=a)}if(a=o-f,h||!(a<0)){if(a/=h,h<0){if(a>s)return;a>c&&(c=a)}else if(h>0){if(a0&&(t[0]=u+c*l,t[1]=f+c*h),s<1&&(n[0]=u+s*l,n[1]=f+s*h),!0}}}}}(f,c,t,n,e,r)?u&&(w.lineStart(),w.point(o,a),x=!1):(b||(w.lineStart(),w.point(f[0],f[1])),w.point(c[0],c[1]),u||w.lineEnd(),x=!1)}g=o,_=a,b=u}var s,l,h,d,p,v,g,_,b,m,x,w=a,M=ui(),A={point:f,lineStart:function(){A.point=c,l&&l.push(h=[]),m=!0,b=!1,g=_=NaN},lineEnd:function(){s&&(c(d,p),v&&b&&M.rejoin(),s.push(M.result())),A.point=f,b&&w.lineEnd()},polygonStart:function(){w=M,s=[],l=[],x=!0},polygonEnd:function(){var n=function(){for(var n=0,e=0,i=l.length;er&&(h-o)*(r-a)>(d-a)*(t-o)&&++n:d<=r&&(h-o)*(r-a)<(d-a)*(t-o)&&--n;return n}(),e=x&&n,i=(s=y(s)).length;(e||i)&&(a.polygonStart(),e&&(a.lineStart(),o(null,null,1,a),a.lineEnd()),i&&si(s,u,n,o,a),a.polygonEnd()),w=a,s=l=h=null}};return A}}function _i(){Rp.point=Rp.lineEnd=vr}function bi(t,n){yp=t*=Zd,_p=op(n*=Zd),bp=tp(n),Rp.point=mi}function mi(t,n){t*=Zd;var 
e=op(n*=Zd),r=tp(n),i=Qd(t-yp),o=tp(i),a=r*op(i),u=bp*e-_p*r*o,f=_p*e+bp*r*o;zp.add(Kd(up(a*a+u*u),f)),yp=t,_p=e,bp=r}function xi(t){return zp.reset(),br(t,Rp),+zp}function wi(t,n){return Lp[0]=t,Lp[1]=n,xi(Dp)}function Mi(t,n){return!(!t||!qp.hasOwnProperty(t.type))&&qp[t.type](t,n)}function Ai(t,n){return 0===wi(t,n)}function Ti(t,n){var e=wi(t[0],t[1]);return wi(t[0],n)+wi(n,t[1])<=e+jd}function Ni(t,n){return!!hi(t.map(Si),Ei(n))}function Si(t){return(t=t.map(Ei)).pop(),t}function Ei(t){return[t[0]*Zd,t[1]*Zd]}function ki(t,n,e){var r=s(t,n-jd,e).concat(n);return function(t){return r.map(function(n){return[t,n]})}}function Ci(t,n,e){var r=s(t,n-jd,e).concat(n);return function(t){return r.map(function(n){return[n,t]})}}function Pi(){function t(){return{type:"MultiLineString",coordinates:n()}}function n(){return s(np(o/y)*y,i,y).map(d).concat(s(np(c/_)*_,f,_).map(p)).concat(s(np(r/v)*v,e,v).filter(function(t){return Qd(t%y)>jd}).map(l)).concat(s(np(u/g)*g,a,g).filter(function(t){return Qd(t%_)>jd}).map(h))}var e,r,i,o,a,u,f,c,l,h,d,p,v=10,g=v,y=90,_=360,b=2.5;return t.lines=function(){return n().map(function(t){return{type:"LineString",coordinates:t}})},t.outline=function(){return{type:"Polygon",coordinates:[d(o).concat(p(f).slice(1),d(i).reverse().slice(1),p(c).reverse().slice(1))]}},t.extent=function(n){return arguments.length?t.extentMajor(n).extentMinor(n):t.extentMinor()},t.extentMajor=function(n){return arguments.length?(o=+n[0][0],i=+n[1][0],c=+n[0][1],f=+n[1][1],o>i&&(n=o,o=i,i=n),c>f&&(n=c,c=f,f=n),t.precision(b)):[[o,c],[i,f]]},t.extentMinor=function(n){return arguments.length?(r=+n[0][0],e=+n[1][0],u=+n[0][1],a=+n[1][1],r>e&&(n=r,r=e,e=n),u>a&&(n=u,u=a,a=n),t.precision(b)):[[r,u],[e,a]]},t.step=function(n){return arguments.length?t.stepMajor(n).stepMinor(n):t.stepMinor()},t.stepMajor=function(n){return arguments.length?(y=+n[0],_=+n[1],t):[y,_]},t.stepMinor=function(n){return arguments.length?(v=+n[0],g=+n[1],t):[v,g]},t.precision=function(n){return 
arguments.length?(b=+n,l=ki(u,a,90),h=Ci(r,e,b),d=ki(c,f,90),p=Ci(o,i,b),t):b},t.extentMajor([[-180,-90+jd],[180,90-jd]]).extentMinor([[-180,-80-jd],[180,80+jd]])}function zi(t){return t}function Ri(){Bp.point=Li}function Li(t,n){Bp.point=Di,mp=wp=t,xp=Mp=n}function Di(t,n){Yp.add(Mp*t-wp*n),wp=t,Mp=n}function Ui(){Di(mp,xp)}function qi(t,n){Gp+=t,Vp+=n,++$p}function Oi(){nv.point=Yi}function Yi(t,n){nv.point=Bi,qi(Np=t,Sp=n)}function Bi(t,n){var e=t-Np,r=n-Sp,i=up(e*e+r*r);Wp+=i*(Np+t)/2,Zp+=i*(Sp+n)/2,Qp+=i,qi(Np=t,Sp=n)}function Fi(){nv.point=qi}function Ii(){nv.point=Hi}function ji(){Xi(Ap,Tp)}function Hi(t,n){nv.point=Xi,qi(Ap=Np=t,Tp=Sp=n)}function Xi(t,n){var e=t-Np,r=n-Sp,i=up(e*e+r*r);Wp+=i*(Np+t)/2,Zp+=i*(Sp+n)/2,Qp+=i,Jp+=(i=Sp*t-Np*n)*(Np+t),Kp+=i*(Sp+n),tv+=3*i,qi(Np=t,Sp=n)}function Gi(t){this._context=t}function Vi(t,n){fv.point=$i,rv=ov=t,iv=av=n}function $i(t,n){ov-=t,av-=n,uv.add(up(ov*ov+av*av)),ov=t,av=n}function Wi(){this._string=[]}function Zi(t){return"m0,"+t+"a"+t+","+t+" 0 1,1 0,"+-2*t+"a"+t+","+t+" 0 1,1 0,"+2*t+"z"}function Qi(t){return function(n){var e=new Ji;for(var r in t)e[r]=t[r];return e.stream=n,e}}function Ji(){}function Ki(t,n,e){var r=t.clipExtent&&t.clipExtent();return t.scale(150).translate([0,0]),null!=r&&t.clipExtent(null),br(e,t.stream(Xp)),n(Xp.result()),null!=r&&t.clipExtent(r),t}function to(t,n,e){return Ki(t,function(e){var r=n[1][0]-n[0][0],i=n[1][1]-n[0][1],o=Math.min(r/(e[1][0]-e[0][0]),i/(e[1][1]-e[0][1])),a=+n[0][0]+(r-o*(e[1][0]+e[0][0]))/2,u=+n[0][1]+(i-o*(e[1][1]+e[0][1]))/2;t.scale(150*o).translate([a,u])},e)}function no(t,n,e){return to(t,[[0,0],n],e)}function eo(t,n,e){return Ki(t,function(e){var r=+n,i=r/(e[1][0]-e[0][0]),o=(r-i*(e[1][0]+e[0][0]))/2,a=-i*e[0][1];t.scale(150*i).translate([o,a])},e)}function ro(t,n,e){return Ki(t,function(e){var r=+n,i=r/(e[1][1]-e[0][1]),o=-i*e[0][0],a=(r-i*(e[1][1]+e[0][1]))/2;t.scale(150*i).translate([o,a])},e)}function io(t,n){return+n?function(t,n){function 
e(r,i,o,a,u,f,c,s,l,h,d,p,v,g){var y=c-r,_=s-i,b=y*y+_*_;if(b>4*n&&v--){var m=a+h,x=u+d,w=f+p,M=up(m*m+x*x+w*w),A=dr(w/=M),T=Qd(Qd(w)-1)n||Qd((y*k+_*C)/b-.5)>.3||a*h+u*d+f*p2?t[2]%360*Zd:0,e()):[b*Wd,m*Wd,x*Wd]},n.angle=function(t){return arguments.length?(w=t%360*Zd,e()):w*Wd},n.precision=function(t){return arguments.length?(c=io(s,S=t*t),r()):up(S)},n.fitExtent=function(t,e){return to(n,t,e)},n.fitSize=function(t,e){return no(n,t,e)},n.fitWidth=function(t,e){return eo(n,t,e)},n.fitHeight=function(t,e){return ro(n,t,e)},function(){return i=t.apply(this,arguments),n.invert=i.invert&&function(t){return(t=l.invert(t[0],t[1]))&&[t[0]*Wd,t[1]*Wd]},e()}}function fo(t){var n=0,e=Xd/3,r=uo(t),i=r(n,e);return i.parallels=function(t){return arguments.length?r(n=t[0]*Zd,e=t[1]*Zd):[n*Wd,e*Wd]},i}function co(t,n){function e(t,n){var e=up(o-2*i*op(n))/i;return[e*op(t*=i),a-e*tp(t)]}var r=op(t),i=(r+op(n))/2;if(Qd(i)0?n<-Gd+jd&&(n=-Gd+jd):n>Gd-jd&&(n=Gd-jd);var e=o/ip(yo(n),i);return[e*op(i*t),o-e*tp(i*t)]}var r=tp(t),i=t===n?op(t):rp(r/tp(n))/rp(yo(n)/yo(t)),o=r*ip(yo(t),i)/i;return i?(e.invert=function(t,n){var e=o-n,r=ap(i)*up(t*t+e*e);return[Kd(t,Qd(e))/i*ap(e),2*Jd(ip(o/r,1/i))-Gd]},e):vo}function bo(t,n){return[t,n]}function mo(t,n){function e(t,n){var e=o-n,r=i*t;return[e*op(r),o-e*tp(r)]}var r=tp(t),i=t===n?op(t):(r-tp(n))/(n-t),o=r/i+t;return Qd(i)=0;)n+=e[r].value;else n=1;t.value=n}function Po(t,n){var e,r,i,o,a,u=new Do(t),f=+t.value&&(u.value=t.value),c=[u];for(null==n&&(n=zo);e=c.pop();)if(f&&(e.value=+e.data.value),(i=n(e.data))&&(a=i.length))for(e.children=new Array(a),o=a-1;o>=0;--o)c.push(r=e.children[o]=new Do(i[o])),r.parent=e,r.depth=e.depth+1;return u.eachBefore(Lo)}function zo(t){return t.children}function Ro(t){t.data=t.data.data}function Lo(t){var n=0;do{t.height=n}while((t=t.parent)&&t.height<++n)}function Do(t){this.data=t,this.depth=this.height=0,this.parent=null}function Uo(t){for(var n,e,r=0,i=(t=function(t){for(var 
n,e,r=t.length;r;)e=Math.random()*r--|0,n=t[r],t[r]=t[e],t[e]=n;return t}(pv.call(t))).length,o=[];r0&&e*e>r*r+i*i}function Yo(t,n){for(var e=0;e(a*=a)?(r=(c+a-i)/(2*c),o=Math.sqrt(Math.max(0,a/c-r*r)),e.x=t.x-r*u-o*f,e.y=t.y-r*f+o*u):(r=(c+i-a)/(2*c),o=Math.sqrt(Math.max(0,i/c-r*r)),e.x=n.x+r*u-o*f,e.y=n.y+r*f+o*u)):(e.x=n.x+e.r,e.y=n.y)}function jo(t,n){var e=t.r+n.r-1e-6,r=n.x-t.x,i=n.y-t.y;return e>0&&e*e>r*r+i*i}function Ho(t){var n=t._,e=t.next._,r=n.r+e.r,i=(n.x*e.r+e.x*n.r)/r,o=(n.y*e.r+e.y*n.r)/r;return i*i+o*o}function Xo(t){this._=t,this.next=null,this.previous=null}function Go(t){if(!(i=t.length))return 0;var n,e,r,i,o,a,u,f,c,s,l;if(n=t[0],n.x=0,n.y=0,!(i>1))return n.r;if(e=t[1],n.x=-e.r,e.x=n.r,e.y=0,!(i>2))return n.r+e.r;Io(e,n,r=t[2]),n=new Xo(n),e=new Xo(e),r=new Xo(r),n.next=r.previous=e,e.next=n.previous=r,r.next=e.previous=n;t:for(u=3;uh&&(h=u),g=s*s*v,(d=Math.max(h/g,g/l))>p){s-=u;break}p=d}y.push(a={value:s,dice:f1&&ha(t[e[r-2]],t[e[r-1]],t[i])<=0;)--r;e[r++]=i}return e.slice(0,r)}function va(){return Math.random()}function ga(t){function n(n){var o=n+"",a=e.get(o);if(!a){if(i!==Cv)return i;e.set(o,a=r.push(n))}return t[(a-1)%t.length]}var e=he(),r=[],i=Cv;return t=null==t?[]:kv.call(t),n.domain=function(t){if(!arguments.length)return r.slice();r=[],e=he();for(var i,o,a=-1,u=t.length;++a2?Ma:wa,o=a=null,r}function r(n){return(o||(o=i(u,f,s?function(t){return function(n,e){var r=t(n=+n,e=+e);return function(t){return t<=n?0:t>=e?1:r(t)}}}(t):t,c)))(+n)}var i,o,a,u=Pv,f=Pv,c=dn,s=!1;return r.invert=function(t){return(a||(a=i(f,u,xa,s?function(t){return function(n,e){var r=t(n=+n,e=+e);return function(t){return t<=0?n:t>=1?e:r(t)}}}(n):n)))(+t)},r.domain=function(t){return arguments.length?(u=Ev.call(t,ma),e()):u.slice()},r.range=function(t){return arguments.length?(f=kv.call(t),e()):f.slice()},r.rangeRound=function(t){return f=kv.call(t),c=pn,e()},r.clamp=function(t){return arguments.length?(s=!!t,e()):s},r.interpolate=function(t){return 
arguments.length?(c=t,e()):c},e()}function Na(n){var e=n.domain;return n.ticks=function(t){var n=e();return l(n[0],n[n.length-1],null==t?10:t)},n.tickFormat=function(n,r){return function(n,e,r){var i,o=n[0],a=n[n.length-1],u=d(o,a,null==e?10:e);switch((r=tr(null==r?",f":r)).type){case"s":var f=Math.max(Math.abs(o),Math.abs(a));return null!=r.precision||isNaN(i=ur(u,f))||(r.precision=i),t.formatPrefix(r,f);case"":case"e":case"g":case"p":case"r":null!=r.precision||isNaN(i=fr(u,Math.max(Math.abs(o),Math.abs(a))))||(r.precision=i-("e"===r.type));break;case"f":case"%":null!=r.precision||isNaN(i=ar(u))||(r.precision=i-2*("%"===r.type))}return t.format(r)}(e(),n,r)},n.nice=function(t){null==t&&(t=10);var r,i=e(),o=0,a=i.length-1,u=i[o],f=i[a];return f0?r=h(u=Math.floor(u/r)*r,f=Math.ceil(f/r)*r,t):r<0&&(r=h(u=Math.ceil(u*r)/r,f=Math.floor(f*r)/r,t)),r>0?(i[o]=Math.floor(u/r)*r,i[a]=Math.ceil(f/r)*r,e(i)):r<0&&(i[o]=Math.ceil(u*r)/r,i[a]=Math.floor(f*r)/r,e(i)),n},n}function Sa(){var t=Ta(xa,sn);return t.copy=function(){return Aa(t,Sa())},Na(t)}function Ea(){function t(t){return+t}var n=[0,1];return t.invert=t,t.domain=t.range=function(e){return arguments.length?(n=Ev.call(e,ma),t):n.slice()},t.copy=function(){return Ea().domain(n)},Na(t)}function ka(t,n){var e,r=0,i=(t=t.slice()).length-1,o=t[r],a=t[i];return a0){for(;df)break;g.push(h)}}else for(;d=1;--s)if(!((h=c*s)f)break;g.push(h)}}else g=l(d,p,Math.min(p-d,v)).map(a);return n?g.reverse():g},e.tickFormat=function(n,r){if(null==r&&(r=10===i?".0e":","),"function"!=typeof r&&(r=t.format(r)),n===1/0)return r;null==n&&(n=10);var u=Math.max(1,i*n/e.ticks().length);return function(t){var n=t/a(Math.round(o(t)));return n*i0?o[n-1]:r[0],n=i?[o[i-1],r]:[o[n-1],o[n]]},t.copy=function(){return Ba().domain([e,r]).range(a)},Na(t)}function Fa(){function t(t){if(t<=t)return e[Jc(n,t,0,r)]}var n=[.5],e=[0,1],r=1;return t.domain=function(i){return 
arguments.length?(n=kv.call(i),r=Math.min(n.length,e.length-1),t):n.slice()},t.range=function(i){return arguments.length?(e=kv.call(i),r=Math.min(n.length,e.length-1),t):e.slice()},t.invertExtent=function(t){var r=e.indexOf(t);return[n[r-1],n[r]]},t.copy=function(){return Fa().domain(n).range(e)},t}function Ia(t,n,e,r){function i(n){return t(n=new Date(+n)),n}return i.floor=i,i.ceil=function(e){return t(e=new Date(e-1)),n(e,1),t(e),e},i.round=function(t){var n=i(t),e=i.ceil(t);return t-n0))return u;do{u.push(a=new Date(+e)),n(e,o),t(e)}while(a=n)for(;t(n),!e(n);)n.setTime(n-1)},function(t,r){if(t>=t)if(r<0)for(;++r<=0;)for(;n(t,-1),!e(t););else for(;--r>=0;)for(;n(t,1),!e(t););})},e&&(i.count=function(n,r){return zv.setTime(+n),Rv.setTime(+r),t(zv),t(Rv),Math.floor(e(zv,Rv))},i.every=function(t){return t=Math.floor(t),isFinite(t)&&t>0?t>1?i.filter(r?function(n){return r(n)%t==0}:function(n){return i.count(0,n)%t==0}):i:null}),i}function ja(t){return Ia(function(n){n.setDate(n.getDate()-(n.getDay()+7-t)%7),n.setHours(0,0,0,0)},function(t,n){t.setDate(t.getDate()+7*n)},function(t,n){return(n-t-(n.getTimezoneOffset()-t.getTimezoneOffset())*Uv)/qv})}function Ha(t){return Ia(function(n){n.setUTCDate(n.getUTCDate()-(n.getUTCDay()+7-t)%7),n.setUTCHours(0,0,0,0)},function(t,n){t.setUTCDate(t.getUTCDate()+7*n)},function(t,n){return(n-t)/qv})}function Xa(t){if(0<=t.y&&t.y<100){var n=new Date(-1,t.m,t.d,t.H,t.M,t.S,t.L);return n.setFullYear(t.y),n}return new Date(t.y,t.m,t.d,t.H,t.M,t.S,t.L)}function Ga(t){if(0<=t.y&&t.y<100){var n=new Date(Date.UTC(-1,t.m,t.d,t.H,t.M,t.S,t.L));return n.setUTCFullYear(t.y),n}return new Date(Date.UTC(t.y,t.m,t.d,t.H,t.M,t.S,t.L))}function Va(t){return{y:t,m:0,d:1,H:0,M:0,S:0,L:0}}function $a(t){function n(t,n){return function(e){var r,i,o,a=[],u=-1,f=0,c=t.length;for(e instanceof Date||(e=new Date(+e));++u53)return null;"w"in a||(a.w=1),"Z"in 
a?(i=(o=(i=Ga(Va(a.y))).getUTCDay())>4||0===o?yg.ceil(i):yg(i),i=pg.offset(i,7*(a.V-1)),a.y=i.getUTCFullYear(),a.m=i.getUTCMonth(),a.d=i.getUTCDate()+(a.w+6)%7):(i=(o=(i=n(Va(a.y))).getDay())>4||0===o?Vv.ceil(i):Vv(i),i=Hv.offset(i,7*(a.V-1)),a.y=i.getFullYear(),a.m=i.getMonth(),a.d=i.getDate()+(a.w+6)%7)}else("W"in a||"U"in a)&&("w"in a||(a.w="u"in a?a.u%7:"W"in a?1:0),o="Z"in a?Ga(Va(a.y)).getUTCDay():n(Va(a.y)).getDay(),a.m=0,a.d="W"in a?(a.w+6)%7+7*a.W-(o+5)%7:a.w+7*a.U-(o+6)%7);return"Z"in a?(a.H+=a.Z/100|0,a.M+=a.Z%100,Ga(a)):n(a)}}function r(t,n,e,r){for(var i,o,a=0,u=n.length,f=e.length;a=f)return-1;if(37===(i=n.charCodeAt(a++))){if(i=n.charAt(a++),!(o=A[i in Dg?n.charAt(a++):i])||(r=o(t,e,r))<0)return-1}else if(i!=e.charCodeAt(r++))return-1}return r}var i=t.dateTime,o=t.date,a=t.time,u=t.periods,f=t.days,c=t.shortDays,s=t.months,l=t.shortMonths,h=Qa(u),d=Ja(u),p=Qa(f),v=Ja(f),g=Qa(c),y=Ja(c),_=Qa(s),b=Ja(s),m=Qa(l),x=Ja(l),w={a:function(t){return c[t.getDay()]},A:function(t){return f[t.getDay()]},b:function(t){return l[t.getMonth()]},B:function(t){return s[t.getMonth()]},c:null,d:_u,e:_u,f:Mu,H:bu,I:mu,j:xu,L:wu,m:Au,M:Tu,p:function(t){return u[+(t.getHours()>=12)]},Q:Ku,s:tf,S:Nu,u:Su,U:Eu,V:ku,w:Cu,W:Pu,x:null,X:null,y:zu,Y:Ru,Z:Lu,"%":Ju},M={a:function(t){return c[t.getUTCDay()]},A:function(t){return f[t.getUTCDay()]},b:function(t){return l[t.getUTCMonth()]},B:function(t){return s[t.getUTCMonth()]},c:null,d:Du,e:Du,f:Bu,H:Uu,I:qu,j:Ou,L:Yu,m:Fu,M:Iu,p:function(t){return u[+(t.getUTCHours()>=12)]},Q:Ku,s:tf,S:ju,u:Hu,U:Xu,V:Gu,w:Vu,W:$u,x:null,X:null,y:Wu,Y:Zu,Z:Qu,"%":Ju},A={a:function(t,n,e){var r=g.exec(n.slice(e));return r?(t.w=y[r[0].toLowerCase()],e+r[0].length):-1},A:function(t,n,e){var r=p.exec(n.slice(e));return r?(t.w=v[r[0].toLowerCase()],e+r[0].length):-1},b:function(t,n,e){var r=m.exec(n.slice(e));return r?(t.m=x[r[0].toLowerCase()],e+r[0].length):-1},B:function(t,n,e){var r=_.exec(n.slice(e));return 
r?(t.m=b[r[0].toLowerCase()],e+r[0].length):-1},c:function(t,n,e){return r(t,i,n,e)},d:fu,e:fu,f:pu,H:su,I:su,j:cu,L:du,m:uu,M:lu,p:function(t,n,e){var r=h.exec(n.slice(e));return r?(t.p=d[r[0].toLowerCase()],e+r[0].length):-1},Q:gu,s:yu,S:hu,u:tu,U:nu,V:eu,w:Ka,W:ru,x:function(t,n,e){return r(t,o,n,e)},X:function(t,n,e){return r(t,a,n,e)},y:ou,Y:iu,Z:au,"%":vu};return w.x=n(o,w),w.X=n(a,w),w.c=n(i,w),M.x=n(o,M),M.X=n(a,M),M.c=n(i,M),{format:function(t){var e=n(t+="",w);return e.toString=function(){return t},e},parse:function(t){var n=e(t+="",Xa);return n.toString=function(){return t},n},utcFormat:function(t){var e=n(t+="",M);return e.toString=function(){return t},e},utcParse:function(t){var n=e(t,Ga);return n.toString=function(){return t},n}}}function Wa(t,n,e){var r=t<0?"-":"",i=(r?-t:t)+"",o=i.length;return r+(o68?1900:2e3),e+r[0].length):-1}function au(t,n,e){var r=/^(Z)|([+-]\d\d)(?::?(\d\d))?/.exec(n.slice(e,e+6));return r?(t.Z=r[1]?0:-(r[2]+(r[3]||"00")),e+r[0].length):-1}function uu(t,n,e){var r=Ug.exec(n.slice(e,e+2));return r?(t.m=r[0]-1,e+r[0].length):-1}function fu(t,n,e){var r=Ug.exec(n.slice(e,e+2));return r?(t.d=+r[0],e+r[0].length):-1}function cu(t,n,e){var r=Ug.exec(n.slice(e,e+3));return r?(t.m=0,t.d=+r[0],e+r[0].length):-1}function su(t,n,e){var r=Ug.exec(n.slice(e,e+2));return r?(t.H=+r[0],e+r[0].length):-1}function lu(t,n,e){var r=Ug.exec(n.slice(e,e+2));return r?(t.M=+r[0],e+r[0].length):-1}function hu(t,n,e){var r=Ug.exec(n.slice(e,e+2));return r?(t.S=+r[0],e+r[0].length):-1}function du(t,n,e){var r=Ug.exec(n.slice(e,e+3));return r?(t.L=+r[0],e+r[0].length):-1}function pu(t,n,e){var r=Ug.exec(n.slice(e,e+6));return r?(t.L=Math.floor(r[0]/1e3),e+r[0].length):-1}function vu(t,n,e){var r=qg.exec(n.slice(e,e+1));return r?e+r[0].length:-1}function gu(t,n,e){var r=Ug.exec(n.slice(e));return r?(t.Q=+r[0],e+r[0].length):-1}function yu(t,n,e){var r=Ug.exec(n.slice(e));return r?(t.Q=1e3*+r[0],e+r[0].length):-1}function _u(t,n){return 
Wa(t.getDate(),n,2)}function bu(t,n){return Wa(t.getHours(),n,2)}function mu(t,n){return Wa(t.getHours()%12||12,n,2)}function xu(t,n){return Wa(1+Hv.count(fg(t),t),n,3)}function wu(t,n){return Wa(t.getMilliseconds(),n,3)}function Mu(t,n){return wu(t,n)+"000"}function Au(t,n){return Wa(t.getMonth()+1,n,2)}function Tu(t,n){return Wa(t.getMinutes(),n,2)}function Nu(t,n){return Wa(t.getSeconds(),n,2)}function Su(t){var n=t.getDay();return 0===n?7:n}function Eu(t,n){return Wa(Gv.count(fg(t),t),n,2)}function ku(t,n){var e=t.getDay();return t=e>=4||0===e?Zv(t):Zv.ceil(t),Wa(Zv.count(fg(t),t)+(4===fg(t).getDay()),n,2)}function Cu(t){return t.getDay()}function Pu(t,n){return Wa(Vv.count(fg(t),t),n,2)}function zu(t,n){return Wa(t.getFullYear()%100,n,2)}function Ru(t,n){return Wa(t.getFullYear()%1e4,n,4)}function Lu(t){var n=t.getTimezoneOffset();return(n>0?"-":(n*=-1,"+"))+Wa(n/60|0,"0",2)+Wa(n%60,"0",2)}function Du(t,n){return Wa(t.getUTCDate(),n,2)}function Uu(t,n){return Wa(t.getUTCHours(),n,2)}function qu(t,n){return Wa(t.getUTCHours()%12||12,n,2)}function Ou(t,n){return Wa(1+pg.count(zg(t),t),n,3)}function Yu(t,n){return Wa(t.getUTCMilliseconds(),n,3)}function Bu(t,n){return Yu(t,n)+"000"}function Fu(t,n){return Wa(t.getUTCMonth()+1,n,2)}function Iu(t,n){return Wa(t.getUTCMinutes(),n,2)}function ju(t,n){return Wa(t.getUTCSeconds(),n,2)}function Hu(t){var n=t.getUTCDay();return 0===n?7:n}function Xu(t,n){return Wa(gg.count(zg(t),t),n,2)}function Gu(t,n){var e=t.getUTCDay();return t=e>=4||0===e?mg(t):mg.ceil(t),Wa(mg.count(zg(t),t)+(4===zg(t).getUTCDay()),n,2)}function Vu(t){return t.getUTCDay()}function $u(t,n){return Wa(yg.count(zg(t),t),n,2)}function Wu(t,n){return Wa(t.getUTCFullYear()%100,n,2)}function Zu(t,n){return Wa(t.getUTCFullYear()%1e4,n,4)}function Qu(){return"+0000"}function Ju(){return"%"}function Ku(t){return+t}function tf(t){return Math.floor(+t/1e3)}function nf(n){return 
Rg=$a(n),t.timeFormat=Rg.format,t.timeParse=Rg.parse,t.utcFormat=Rg.utcFormat,t.utcParse=Rg.utcParse,Rg}function ef(t){return new Date(t)}function rf(t){return t instanceof Date?+t:+new Date(+t)}function of(t,n,r,i,o,a,u,f,c){function s(e){return(u(e)=1?A_:t<=-1?-A_:Math.asin(t)}function hf(t){return t.innerRadius}function df(t){return t.outerRadius}function pf(t){return t.startAngle}function vf(t){return t.endAngle}function gf(t){return t&&t.padAngle}function yf(t,n,e,r,i,o,a){var u=t-e,f=n-r,c=(a?o:-o)/x_(u*u+f*f),s=c*f,l=-c*u,h=t+s,d=n+l,p=e+s,v=r+l,g=(h+p)/2,y=(d+v)/2,_=p-h,b=v-d,m=_*_+b*b,x=i-o,w=h*v-p*d,M=(b<0?-1:1)*x_(__(0,x*x*m-w*w)),A=(w*b-_*M)/m,T=(-w*_-b*M)/m,N=(w*b+_*M)/m,S=(-w*_+b*M)/m,E=A-g,k=T-y,C=N-g,P=S-y;return E*E+k*k>C*C+P*P&&(A=N,T=S),{cx:A,cy:T,x01:-s,y01:-l,x11:A*(i/x-1),y11:T*(i/x-1)}}function _f(t){this._context=t}function bf(t){return new _f(t)}function mf(t){return t[0]}function xf(t){return t[1]}function wf(){function t(t){var u,f,c,s=t.length,l=!1;for(null==i&&(a=o(c=oe())),u=0;u<=s;++u)!(u=s;--l)c.point(g[l],y[l]);c.lineEnd(),c.areaEnd()}v&&(g[n]=+e(h,n,t),y[n]=+i(h,n,t),c.point(r?+r(h,n,t):g[n],o?+o(h,n,t):y[n]))}if(d)return c=null,d+""||null}function n(){return wf().defined(a).curve(f).context(u)}var e=mf,r=null,i=sf(0),o=xf,a=sf(!0),u=null,f=bf,c=null;return t.x=function(n){return arguments.length?(e="function"==typeof n?n:sf(+n),r=null,t):e},t.x0=function(n){return arguments.length?(e="function"==typeof n?n:sf(+n),t):e},t.x1=function(n){return arguments.length?(r=null==n?null:"function"==typeof n?n:sf(+n),t):r},t.y=function(n){return arguments.length?(i="function"==typeof n?n:sf(+n),o=null,t):i},t.y0=function(n){return arguments.length?(i="function"==typeof n?n:sf(+n),t):i},t.y1=function(n){return arguments.length?(o=null==n?null:"function"==typeof n?n:sf(+n),t):o},t.lineX0=t.lineY0=function(){return n().x(e).y(i)},t.lineY1=function(){return n().x(e).y(o)},t.lineX1=function(){return n().x(r).y(i)},t.defined=function(n){return 
arguments.length?(a="function"==typeof n?n:sf(!!n),t):a},t.curve=function(n){return arguments.length?(f=n,null!=u&&(c=f(u)),t):f},t.context=function(n){return arguments.length?(null==n?u=c=null:c=f(u=n),t):u},t}function Af(t,n){return nt?1:n>=t?0:NaN}function Tf(t){return t}function Nf(t){this._curve=t}function Sf(t){function n(n){return new Nf(t(n))}return n._curve=t,n}function Ef(t){var n=t.curve;return t.angle=t.x,delete t.x,t.radius=t.y,delete t.y,t.curve=function(t){return arguments.length?n(Sf(t)):n()._curve},t}function kf(){return Ef(wf().curve(N_))}function Cf(){var t=Mf().curve(N_),n=t.curve,e=t.lineX0,r=t.lineX1,i=t.lineY0,o=t.lineY1;return t.angle=t.x,delete t.x,t.startAngle=t.x0,delete t.x0,t.endAngle=t.x1,delete t.x1,t.radius=t.y,delete t.y,t.innerRadius=t.y0,delete t.y0,t.outerRadius=t.y1,delete t.y1,t.lineStartAngle=function(){return Ef(e())},delete t.lineX0,t.lineEndAngle=function(){return Ef(r())},delete t.lineX1,t.lineInnerRadius=function(){return Ef(i())},delete t.lineY0,t.lineOuterRadius=function(){return Ef(o())},delete t.lineY1,t.curve=function(t){return arguments.length?n(Sf(t)):n()._curve},t}function Pf(t,n){return[(n=+n)*Math.cos(t-=Math.PI/2),n*Math.sin(t)]}function zf(t){return t.source}function Rf(t){return t.target}function Lf(t){function n(){var n,u=S_.call(arguments),f=e.apply(this,u),c=r.apply(this,u);if(a||(a=n=oe()),t(a,+i.apply(this,(u[0]=f,u)),+o.apply(this,u),+i.apply(this,(u[0]=c,u)),+o.apply(this,u)),n)return a=null,n+""||null}var e=zf,r=Rf,i=mf,o=xf,a=null;return n.source=function(t){return arguments.length?(e=t,n):e},n.target=function(t){return arguments.length?(r=t,n):r},n.x=function(t){return arguments.length?(i="function"==typeof t?t:sf(+t),n):i},n.y=function(t){return arguments.length?(o="function"==typeof t?t:sf(+t),n):o},n.context=function(t){return arguments.length?(a=null==t?null:t,n):a},n}function Df(t,n,e,r,i){t.moveTo(n,e),t.bezierCurveTo(n=(n+r)/2,e,n,i,r,i)}function 
Uf(t,n,e,r,i){t.moveTo(n,e),t.bezierCurveTo(n,e=(e+i)/2,r,e,r,i)}function qf(t,n,e,r,i){var o=Pf(n,e),a=Pf(n,e=(e+i)/2),u=Pf(r,e),f=Pf(r,i);t.moveTo(o[0],o[1]),t.bezierCurveTo(a[0],a[1],u[0],u[1],f[0],f[1])}function Of(){}function Yf(t,n,e){t._context.bezierCurveTo((2*t._x0+t._x1)/3,(2*t._y0+t._y1)/3,(t._x0+2*t._x1)/3,(t._y0+2*t._y1)/3,(t._x0+4*t._x1+n)/6,(t._y0+4*t._y1+e)/6)}function Bf(t){this._context=t}function Ff(t){this._context=t}function If(t){this._context=t}function jf(t,n){this._basis=new Bf(t),this._beta=n}function Hf(t,n,e){t._context.bezierCurveTo(t._x1+t._k*(t._x2-t._x0),t._y1+t._k*(t._y2-t._y0),t._x2+t._k*(t._x1-n),t._y2+t._k*(t._y1-e),t._x2,t._y2)}function Xf(t,n){this._context=t,this._k=(1-n)/6}function Gf(t,n){this._context=t,this._k=(1-n)/6}function Vf(t,n){this._context=t,this._k=(1-n)/6}function $f(t,n,e){var r=t._x1,i=t._y1,o=t._x2,a=t._y2;if(t._l01_a>w_){var u=2*t._l01_2a+3*t._l01_a*t._l12_a+t._l12_2a,f=3*t._l01_a*(t._l01_a+t._l12_a);r=(r*u-t._x0*t._l12_2a+t._x2*t._l01_2a)/f,i=(i*u-t._y0*t._l12_2a+t._y2*t._l01_2a)/f}if(t._l23_a>w_){var c=2*t._l23_2a+3*t._l23_a*t._l12_a+t._l12_2a,s=3*t._l23_a*(t._l23_a+t._l12_a);o=(o*c+t._x1*t._l23_2a-n*t._l12_2a)/s,a=(a*c+t._y1*t._l23_2a-e*t._l12_2a)/s}t._context.bezierCurveTo(r,i,o,a,t._x2,t._y2)}function Wf(t,n){this._context=t,this._alpha=n}function Zf(t,n){this._context=t,this._alpha=n}function Qf(t,n){this._context=t,this._alpha=n}function Jf(t){this._context=t}function Kf(t){return t<0?-1:1}function tc(t,n,e){var r=t._x1-t._x0,i=n-t._x1,o=(t._y1-t._y0)/(r||i<0&&-0),a=(e-t._y1)/(i||r<0&&-0),u=(o*i+a*r)/(r+i);return(Kf(o)+Kf(a))*Math.min(Math.abs(o),Math.abs(a),.5*Math.abs(u))||0}function nc(t,n){var e=t._x1-t._x0;return e?(3*(t._y1-t._y0)/e-n)/2:n}function ec(t,n,e){var r=t._x0,i=t._y0,o=t._x1,a=t._y1,u=(o-r)/3;t._context.bezierCurveTo(r+u,i+u*n,o-u,a-u*e,o,a)}function rc(t){this._context=t}function ic(t){this._context=new oc(t)}function oc(t){this._context=t}function ac(t){this._context=t}function 
uc(t){var n,e,r=t.length-1,i=new Array(r),o=new Array(r),a=new Array(r);for(i[0]=0,o[0]=2,a[0]=t[0]+2*t[1],n=1;n=0;--n)i[n]=(a[n]-i[n+1])/o[n];for(o[r-1]=(t[r]+i[r-1])/2,n=0;n1)for(var e,r,i,o=1,a=t[n[0]],u=a.length;o=0;)e[n]=n;return e}function lc(t,n){return t[n]}function hc(t){var n=t.map(dc);return sc(t).sort(function(t,e){return n[t]-n[e]})}function dc(t){for(var n,e=0,r=-1,i=t.length;++r0)){if(o/=h,h<0){if(o0){if(o>l)return;o>s&&(s=o)}if(o=r-f,h||!(o<0)){if(o/=h,h<0){if(o>l)return;o>s&&(s=o)}else if(h>0){if(o0)){if(o/=d,d<0){if(o0){if(o>l)return;o>s&&(s=o)}if(o=i-c,d||!(o<0)){if(o/=d,d<0){if(o>l)return;o>s&&(s=o)}else if(d>0){if(o0||l<1)||(s>0&&(t[0]=[f+s*h,c+s*d]),l<1&&(t[1]=[f+l*h,c+l*d]),!0)}}}}}function Nc(t,n,e,r,i){var o=t[1];if(o)return!0;var a,u,f=t[0],c=t.left,s=t.right,l=c[0],h=c[1],d=s[0],p=s[1],v=(l+d)/2,g=(h+p)/2;if(p===h){if(v=r)return;if(l>d){if(f){if(f[1]>=i)return}else f=[v,e];o=[v,i]}else{if(f){if(f[1]1)if(l>d){if(f){if(f[1]>=i)return}else f=[(e-u)/a,e];o=[(i-u)/a,i]}else{if(f){if(f[1]=r)return}else f=[n,a*n+u];o=[r,a*r+u]}else{if(f){if(f[0]=-ab)){var d=f*f+c*c,p=s*s+l*l,v=(l*d-c*p)/h,g=(f*p-s*d)/h,y=rb.pop()||new function(){_c(this),this.x=this.y=this.arc=this.site=this.cy=null};y.arc=t,y.site=i,y.x=v+a,y.y=(y.cy=g+u)+Math.sqrt(v*v+g*g),t.circle=y;for(var _=null,b=nb._;b;)if(y.yob)u=u.L;else{if(!((i=o-function(t,n){var e=t.N;if(e)return Uc(e,n);var r=t.site;return r[1]===n?r[0]:1/0}(u,a))>ob)){r>-ob?(n=u.P,e=u):i>-ob?(n=u,e=u.N):n=e=u;break}if(!u.R){n=u;break}u=u.R}(function(t){tb[t.index]={site:t,halfedges:[]}})(t);var f=zc(t);if(K_.insert(n,f),n||e){if(n===e)return Pc(n),e=zc(n.site),K_.insert(f,e),f.edge=e.edge=wc(n.site,f.site),Cc(n),void Cc(e);if(e){Pc(n),Pc(e);var c=n.site,s=c[0],l=c[1],h=t[0]-s,d=t[1]-l,p=e.site,v=p[0]-s,g=p[1]-l,y=2*(h*g-d*v),_=h*h+d*d,b=v*v+g*g,m=[(g*_-d*b)/y+s,(h*b-v*_)/y+l];Ac(e.edge,c,p,m),f.edge=wc(c,t,null,m),e.edge=wc(t,p,null,m),Cc(n),Cc(e)}else f.edge=wc(n.site,f.site)}}function Uc(t,n){var 
e=t.site,r=e[0],i=e[1],o=i-n;if(!o)return r;var a=t.P;if(!a)return-1/0;var u=(e=a.site)[0],f=e[1],c=f-n;if(!c)return u;var s=u-r,l=1/o-1/c,h=s/c;return l?(-h+Math.sqrt(h*h-2*l*(s*s/(-2*c)-f+c/2+i-o/2)))/l+r:(r+u)/2}function qc(t,n,e){return(t[0]-e[0])*(n[1]-t[1])-(t[0]-n[0])*(e[1]-t[1])}function Oc(t,n){return n[1]-t[1]||n[0]-t[0]}function Yc(t,n){var e,r,i,o=t.sort(Oc).pop();for(eb=[],tb=new Array(t.length),K_=new yc,nb=new yc;;)if(i=J_,o&&(!i||o[1]ob||Math.abs(i[0][1]-i[1][1])>ob)||delete eb[o]})(a,u,f,c),function(t,n,e,r){var i,o,a,u,f,c,s,l,h,d,p,v,g=tb.length,y=!0;for(i=0;iob||Math.abs(v-h)>ob)&&(f.splice(u,0,eb.push(Mc(a,d,Math.abs(p-t)ob?[t,Math.abs(l-t)ob?[Math.abs(h-r)ob?[e,Math.abs(l-e)ob?[Math.abs(h-n)r?(r+i)/2:Math.min(0,r)||Math.max(0,i),a>o?(o+a)/2:Math.min(0,o)||Math.max(0,a))}var Qc=e(n),Jc=Qc.right,Kc=Qc.left,ts=Array.prototype,ns=ts.slice,es=ts.map,rs=Math.sqrt(50),is=Math.sqrt(10),os=Math.sqrt(2),as=Array.prototype.slice,us=1,fs=2,cs=3,ss=4,ls=1e-6,hs={value:function(){}};S.prototype=N.prototype={constructor:S,on:function(t,n){var e,r=this._,i=function(t,n){return t.trim().split(/^|\s+/).map(function(t){var e="",r=t.indexOf(".");if(r>=0&&(e=t.slice(r+1),t=t.slice(0,r)),t&&!n.hasOwnProperty(t))throw new Error("unknown type: "+t);return{type:t,name:e}})}(t+"",r),o=-1,a=i.length;{if(!(arguments.length<2)){if(null!=n&&"function"!=typeof n)throw new Error("invalid callback: "+n);for(;++o0)for(var e,r,i=new Array(e),o=0;o=0&&(this._names.splice(n,1),this._node.setAttribute("class",this._names.join(" ")))},contains:function(t){return this._names.indexOf(t)>=0}};var ms={};if(t.event=null,"undefined"!=typeof document){"onmouseenter"in document.documentElement||(ms={mouseenter:"mouseover",mouseleave:"mouseout"})}var xs=[null];ut.prototype=ft.prototype={constructor:ut,select:function(t){"function"!=typeof t&&(t=z(t));for(var n=this._groups,e=n.length,r=new 
Array(e),i=0;i=m&&(m=b+1);!(_=g[m])&&++m=0;)(r=i[o])&&(a&&a!==r.nextSibling&&a.parentNode.insertBefore(r,a),a=r);return this},sort:function(t){function n(n,e){return n&&e?t(n.__data__,e.__data__):!n-!e}t||(t=Y);for(var e=this._groups,r=e.length,i=new Array(r),o=0;o1?this.each((null==n?function(t){return function(){this.style.removeProperty(t)}}:"function"==typeof n?function(t,n,e){return function(){var r=n.apply(this,arguments);null==r?this.style.removeProperty(t):this.style.setProperty(t,r,e)}}:function(t,n,e){return function(){this.style.setProperty(t,n,e)}})(t,n,null==e?"":e)):F(this.node(),t)},property:function(t,n){return arguments.length>1?this.each((null==n?function(t){return function(){delete this[t]}}:"function"==typeof n?function(t,n){return function(){var e=n.apply(this,arguments);null==e?delete this[t]:this[t]=e}}:function(t,n){return function(){this[t]=n}})(t,n)):this.node()[t]},classed:function(t,n){var e=I(t+"");if(arguments.length<2){for(var r=j(this.node()),i=-1,o=e.length;++i=0&&(n=t.slice(e+1),t=t.slice(0,e)),{type:t,name:n}})}(t+""),a=o.length;if(!(arguments.length<2)){for(u=n?it:rt,null==e&&(e=!1),r=0;r=240?t-240:t+120,i,r),Yt(t,i,r),Yt(t<120?t+240:t-120,i,r),this.opacity)},displayable:function(){return(0<=this.s&&this.s<=1||isNaN(this.s))&&0<=this.l&&this.l<=1&&0<=this.opacity&&this.opacity<=1}}));var Ds=Math.PI/180,Us=180/Math.PI,qs=.96422,Os=1,Ys=.82521,Bs=4/29,Fs=6/29,Is=3*Fs*Fs,js=Fs*Fs*Fs;Nt(It,Ft,St(Et,{brighter:function(t){return new It(this.l+18*(null==t?1:t),this.a,this.b,this.opacity)},darker:function(t){return new It(this.l-18*(null==t?1:t),this.a,this.b,this.opacity)},rgb:function(){var t=(this.l+16)/116,n=isNaN(this.a)?t:t+this.a/500,e=isNaN(this.b)?t:t-this.b/200;return n=qs*Ht(n),t=Os*Ht(t),e=Ys*Ht(e),new Lt(Xt(3.1338561*n-1.6168667*t-.4906146*e),Xt(-.9787684*n+1.9161415*t+.033454*e),Xt(.0719453*n-.2289914*t+1.4052427*e),this.opacity)}})),Nt(Wt,$t,St(Et,{brighter:function(t){return new 
Wt(this.h,this.c,this.l+18*(null==t?1:t),this.opacity)},darker:function(t){return new Wt(this.h,this.c,this.l-18*(null==t?1:t),this.opacity)},rgb:function(){return Bt(this).rgb()}}));var Hs=-.29227,Xs=-.90649,Gs=1.97294,Vs=Gs*Xs,$s=1.78277*Gs,Ws=1.78277*Hs- -.14861*Xs;Nt(Qt,Zt,St(Et,{brighter:function(t){return t=null==t?1/.7:Math.pow(1/.7,t),new Qt(this.h,this.s,this.l*t,this.opacity)},darker:function(t){return t=null==t?.7:Math.pow(.7,t),new Qt(this.h,this.s,this.l*t,this.opacity)},rgb:function(){var t=isNaN(this.h)?0:(this.h+120)*Ds,n=+this.l,e=isNaN(this.s)?0:this.s*n*(1-n),r=Math.cos(t),i=Math.sin(t);return new Lt(255*(n+e*(-.14861*r+1.78277*i)),255*(n+e*(Hs*r+Xs*i)),255*(n+e*(Gs*r)),this.opacity)}}));var Zs,Qs,Js,Ks,tl,nl,el=function t(n){function e(t,n){var e=r((t=Rt(t)).r,(n=Rt(n)).r),i=r(t.g,n.g),o=r(t.b,n.b),a=an(t.opacity,n.opacity);return function(n){return t.r=e(n),t.g=i(n),t.b=o(n),t.opacity=a(n),t+""}}var r=on(n);return e.gamma=t,e}(1),rl=un(Kt),il=un(tn),ol=/[-+]?(?:\d+\.?\d*|\.?\d+)(?:[eE][-+]?\d+)?/g,al=new RegExp(ol.source,"g"),ul=180/Math.PI,fl={translateX:0,translateY:0,rotate:0,skewX:0,scaleX:1,scaleY:1},cl=gn(function(t){return"none"===t?fl:(Zs||(Zs=document.createElement("DIV"),Qs=document.documentElement,Js=document.defaultView),Zs.style.transform=t,t=Js.getComputedStyle(Qs.appendChild(Zs),null).getPropertyValue("transform"),Qs.removeChild(Zs),t=t.slice(7,-1).split(","),vn(+t[0],+t[1],+t[2],+t[3],+t[4],+t[5]))},"px, ","px)","deg)"),sl=gn(function(t){return null==t?fl:(Ks||(Ks=document.createElementNS("http://www.w3.org/2000/svg","g")),Ks.setAttribute("transform",t),(t=Ks.transform.baseVal.consolidate())?(t=t.matrix,vn(t.a,t.b,t.c,t.d,t.e,t.f)):fl)},", ",")",")"),ll=Math.SQRT2,hl=2,dl=4,pl=1e-12,vl=bn(rn),gl=bn(an),yl=mn(rn),_l=mn(an),bl=xn(rn),ml=xn(an),xl=0,wl=0,Ml=0,Al=1e3,Tl=0,Nl=0,Sl=0,El="object"==typeof performance&&performance.now?performance:Date,kl="object"==typeof 
window&&window.requestAnimationFrame?window.requestAnimationFrame.bind(window):function(t){setTimeout(t,17)};An.prototype=Tn.prototype={constructor:An,restart:function(t,n,e){if("function"!=typeof t)throw new TypeError("callback is not a function");e=(null==e?wn():+e)+(null==n?0:+n),this._next||nl===this||(nl?nl._next=this:tl=this,nl=this),this._call=t,this._time=e,kn()},stop:function(){this._call&&(this._call=null,this._time=1/0,kn())}};var Cl=N("start","end","interrupt"),Pl=[],zl=0,Rl=1,Ll=2,Dl=3,Ul=4,ql=5,Ol=6,Yl=ft.prototype.constructor,Bl=0,Fl=ft.prototype;On.prototype=Yn.prototype={constructor:On,select:function(t){var n=this._name,e=this._id;"function"!=typeof t&&(t=z(t));for(var r=this._groups,i=r.length,o=new Array(i),a=0;a=0&&(t=t.slice(0,n)),!t||"start"===t})}(n)?zn:Rn;return function(){var a=o(this,t),u=a.on;u!==r&&(i=(r=u).copy()).on(n,e),a.on=i}}(e,t,n))},attr:function(t,n){var e=k(t),r="transform"===e?sl:qn;return this.attrTween(t,"function"==typeof n?(e.local?function(t,n,e){var r,i,o;return function(){var a,u=e(this);if(null!=u)return(a=this.getAttributeNS(t.space,t.local))===u?null:a===r&&u===i?o:o=n(r=a,i=u);this.removeAttributeNS(t.space,t.local)}}:function(t,n,e){var r,i,o;return function(){var a,u=e(this);if(null!=u)return(a=this.getAttribute(t))===u?null:a===r&&u===i?o:o=n(r=a,i=u);this.removeAttribute(t)}})(e,r,Un(this,"attr."+t,n)):null==n?(e.local?function(t){return function(){this.removeAttributeNS(t.space,t.local)}}:function(t){return function(){this.removeAttribute(t)}})(e):(e.local?function(t,n,e){var r,i;return function(){var o=this.getAttributeNS(t.space,t.local);return o===e?null:o===r?i:i=n(r=o,e)}}:function(t,n,e){var r,i;return function(){var o=this.getAttribute(t);return o===e?null:o===r?i:i=n(r=o,e)}})(e,r,n+""))},attrTween:function(t,n){var e="attr."+t;if(arguments.length<2)return(e=this.tween(e))&&e._value;if(null==n)return this.tween(e,null);if("function"!=typeof n)throw new Error;var r=k(t);return 
this.tween(e,(r.local?function(t,n){function e(){var e=this,r=n.apply(e,arguments);return r&&function(n){e.setAttributeNS(t.space,t.local,r(n))}}return e._value=n,e}:function(t,n){function e(){var e=this,r=n.apply(e,arguments);return r&&function(n){e.setAttribute(t,r(n))}}return e._value=n,e})(r,n))},style:function(t,n,e){var r="transform"==(t+="")?cl:qn;return null==n?this.styleTween(t,function(t,n){var e,r,i;return function(){var o=F(this,t),a=(this.style.removeProperty(t),F(this,t));return o===a?null:o===e&&a===r?i:i=n(e=o,r=a)}}(t,r)).on("end.style."+t,function(t){return function(){this.style.removeProperty(t)}}(t)):this.styleTween(t,"function"==typeof n?function(t,n,e){var r,i,o;return function(){var a=F(this,t),u=e(this);return null==u&&(this.style.removeProperty(t),u=F(this,t)),a===u?null:a===r&&u===i?o:o=n(r=a,i=u)}}(t,r,Un(this,"style."+t,n)):function(t,n,e){var r,i;return function(){var o=F(this,t);return o===e?null:o===r?i:i=n(r=o,e)}}(t,r,n+""),e)},styleTween:function(t,n,e){var r="style."+(t+="");if(arguments.length<2)return(r=this.tween(r))&&r._value;if(null==n)return this.tween(r,null);if("function"!=typeof n)throw new Error;return this.tween(r,function(t,n,e){function r(){var r=this,i=n.apply(r,arguments);return i&&function(n){r.style.setProperty(t,i(n),e)}}return r._value=n,r}(t,n,null==e?"":e))},text:function(t){return this.tween("text","function"==typeof t?function(t){return function(){var n=t(this);this.textContent=null==n?"":n}}(Un(this,"text",t)):function(t){return function(){this.textContent=t}}(null==t?"":t+""))},remove:function(){return this.on("end.remove",function(t){return function(){var n=this.parentNode;for(var e in this.__transition)if(+e!==t)return;n&&n.removeChild(this)}}(this._id))},tween:function(t,n){var e=this._id;if(t+="",arguments.length<2){for(var r,i=Ln(this.node(),e).tween,o=0,a=i.length;o1e-6)if(Math.abs(s*u-f*c)>1e-6&&i){var 
h=e-o,d=r-a,p=u*u+f*f,v=h*h+d*d,g=Math.sqrt(p),y=Math.sqrt(l),_=i*Math.tan((Ph-Math.acos((p+l-v)/(2*g*y)))/2),b=_/y,m=_/g;Math.abs(b-1)>1e-6&&(this._+="L"+(t+b*c)+","+(n+b*s)),this._+="A"+i+","+i+",0,0,"+ +(s*h>c*d)+","+(this._x1=t+m*u)+","+(this._y1=n+m*f)}else this._+="L"+(this._x1=t)+","+(this._y1=n);else;},arc:function(t,n,e,r,i,o){t=+t,n=+n;var a=(e=+e)*Math.cos(r),u=e*Math.sin(r),f=t+a,c=n+u,s=1^o,l=o?r-i:i-r;if(e<0)throw new Error("negative radius: "+e);null===this._x1?this._+="M"+f+","+c:(Math.abs(this._x1-f)>1e-6||Math.abs(this._y1-c)>1e-6)&&(this._+="L"+f+","+c),e&&(l<0&&(l=l%zh+zh),l>Rh?this._+="A"+e+","+e+",0,1,"+s+","+(t-a)+","+(n-u)+"A"+e+","+e+",0,1,"+s+","+(this._x1=f)+","+(this._y1=c):l>1e-6&&(this._+="A"+e+","+e+",0,"+ +(l>=Ph)+","+s+","+(this._x1=t+e*Math.cos(i))+","+(this._y1=n+e*Math.sin(i))))},rect:function(t,n,e,r){this._+="M"+(this._x0=this._x1=+t)+","+(this._y0=this._y1=+n)+"h"+ +e+"v"+ +r+"h"+-e+"Z"},toString:function(){return this._}};le.prototype=he.prototype={constructor:le,has:function(t){return"$"+t in this},get:function(t){return this["$"+t]},set:function(t,n){return this["$"+t]=n,this},remove:function(t){var n="$"+t;return n in this&&delete this[n]},clear:function(){for(var t in this)"$"===t[0]&&delete this[t]},keys:function(){var t=[];for(var n in this)"$"===n[0]&&t.push(n.slice(1));return t},values:function(){var t=[];for(var n in this)"$"===n[0]&&t.push(this[n]);return t},entries:function(){var t=[];for(var n in this)"$"===n[0]&&t.push({key:n.slice(1),value:this[n]});return t},size:function(){var t=0;for(var n in this)"$"===n[0]&&++t;return t},empty:function(){for(var t in this)if("$"===t[0])return!1;return!0},each:function(t){for(var n in this)"$"===n[0]&&t(this[n],n.slice(1),this)}};var Lh=he.prototype;ye.prototype=_e.prototype={constructor:ye,has:Lh.has,add:function(t){return t+="",this["$"+t]=t,this},remove:Lh.remove,clear:Lh.clear,values:Lh.keys,size:Lh.size,empty:Lh.empty,each:Lh.each};var 
Dh=Array.prototype.slice,Uh=[[],[[[1,1.5],[.5,1]]],[[[1.5,1],[1,1.5]]],[[[1.5,1],[.5,1]]],[[[1,.5],[1.5,1]]],[[[1,1.5],[.5,1]],[[1,.5],[1.5,1]]],[[[1,.5],[1,1.5]]],[[[1,.5],[.5,1]]],[[[.5,1],[1,.5]]],[[[1,1.5],[1,.5]]],[[[.5,1],[1,.5]],[[1.5,1],[1,1.5]]],[[[1.5,1],[1,.5]]],[[[.5,1],[1.5,1]]],[[[1,1.5],[1.5,1]]],[[[.5,1],[1,1.5]]],[]],qh={},Oh={},Yh=34,Bh=10,Fh=13,Ih=ke(","),jh=Ih.parse,Hh=Ih.parseRows,Xh=Ih.format,Gh=Ih.formatRows,Vh=ke("\t"),$h=Vh.parse,Wh=Vh.parseRows,Zh=Vh.format,Qh=Vh.formatRows,Jh=Le(jh),Kh=Le($h),td=Ue("application/xml"),nd=Ue("text/html"),ed=Ue("image/svg+xml"),rd=je.prototype=He.prototype;rd.copy=function(){var t,n,e=new He(this._x,this._y,this._x0,this._y0,this._x1,this._y1),r=this._root;if(!r)return e;if(!r.length)return e._root=Xe(r),e;for(t=[{source:r,target:e._root=new Array(4)}];r=t.pop();)for(var i=0;i<4;++i)(n=r.source[i])&&(n.length?t.push({source:n,target:r.target[i]=new Array(4)}):r.target[i]=Xe(n));return e},rd.add=function(t){var n=+this._x.call(null,t),e=+this._y.call(null,t);return Ye(this.cover(n,e),n,e,t)},rd.addAll=function(t){var n,e,r,i,o=t.length,a=new Array(o),u=new Array(o),f=1/0,c=1/0,s=-1/0,l=-1/0;for(e=0;es&&(s=r),il&&(l=i));for(st||t>i||r>n||n>o))return this;var a,u,f=i-e,c=this._root;switch(u=(n<(r+o)/2)<<1|t<(e+i)/2){case 0:do{a=new Array(4),a[u]=c,c=a}while(f*=2,i=e+f,o=r+f,t>i||n>o);break;case 1:do{a=new Array(4),a[u]=c,c=a}while(f*=2,e=i-f,o=r+f,e>t||n>o);break;case 2:do{a=new Array(4),a[u]=c,c=a}while(f*=2,i=e+f,r=o-f,t>i||r>n);break;case 3:do{a=new Array(4),a[u]=c,c=a}while(f*=2,e=i-f,r=o-f,e>t||r>n)}this._root&&this._root.length&&(this._root=c)}return this._x0=e,this._y0=r,this._x1=i,this._y1=o,this},rd.data=function(){var t=[];return this.visit(function(n){if(!n.length)do{t.push(n.data)}while(n=n.next)}),t},rd.extent=function(t){return arguments.length?this.cover(+t[0][0],+t[0][1]).cover(+t[1][0],+t[1][1]):isNaN(this._x0)?void 0:[[this._x0,this._y0],[this._x1,this._y1]]},rd.find=function(t,n,e){var 
r,i,o,a,u,f,c,s=this._x0,l=this._y0,h=this._x1,d=this._y1,p=[],v=this._root;for(v&&p.push(new Be(v,s,l,h,d)),null==e?e=1/0:(s=t-e,l=n-e,h=t+e,d=n+e,e*=e);f=p.pop();)if(!(!(v=f.node)||(i=f.x0)>h||(o=f.y0)>d||(a=f.x1)=y)<<1|t>=g)&&(f=p[p.length-1],p[p.length-1]=p[p.length-1-c],p[p.length-1-c]=f)}else{var _=t-+this._x.call(null,v.data),b=n-+this._y.call(null,v.data),m=_*_+b*b;if(m=(u=(p+g)/2))?p=u:g=u,(s=a>=(f=(v+y)/2))?v=f:y=f,n=d,!(d=d[l=s<<1|c]))return this;if(!d.length)break;(n[l+1&3]||n[l+2&3]||n[l+3&3])&&(e=n,h=l)}for(;d.data!==t;)if(r=d,!(d=d.next))return this;return(i=d.next)&&delete d.next,r?(i?r.next=i:delete r.next,this):n?(i?n[l]=i:delete n[l],(d=n[0]||n[1]||n[2]||n[3])&&d===(n[3]||n[2]||n[1]||n[0])&&!d.length&&(e?e[h]=d:this._root=d),this):(this._root=i,this)},rd.removeAll=function(t){for(var n=0,e=t.length;n=^]))?([+\-\( ])?([$#])?(0)?(\d+)?(,)?(\.\d+)?(~)?([a-z%])?$/i;tr.prototype=nr.prototype,nr.prototype.toString=function(){return this.fill+this.align+this.sign+this.symbol+(this.zero?"0":"")+(null==this.width?"":Math.max(1,0|this.width))+(this.comma?",":"")+(null==this.precision?"":"."+Math.max(0,0|this.precision))+(this.trim?"~":"")+this.type};var ud,fd,cd={"%":function(t,n){return(100*t).toFixed(n)},b:function(t){return Math.round(t).toString(2)},c:function(t){return t+""},d:function(t){return Math.round(t).toString(10)},e:function(t,n){return t.toExponential(n)},f:function(t,n){return t.toFixed(n)},g:function(t,n){return t.toPrecision(n)},o:function(t){return Math.round(t).toString(8)},p:function(t,n){return er(100*t,n)},r:er,s:function(t,n){var e=Je(t,n);if(!e)return t+"";var r=e[0],i=e[1],o=i-(ud=3*Math.max(-8,Math.min(8,Math.floor(i/3))))+1,a=r.length;return o===a?r:o>a?r+new Array(o-a+1).join("0"):o>0?r.slice(0,o)+"."+r.slice(o):"0."+new Array(1-o).join("0")+Je(t,Math.max(0,n+o-1))[0]},X:function(t){return Math.round(t).toString(16).toUpperCase()},x:function(t){return 
Math.round(t).toString(16)}},sd=["y","z","a","f","p","n","µ","m","","k","M","G","T","P","E","Z","Y"];or({decimal:".",thousands:",",grouping:[3],currency:["$",""]}),sr.prototype={constructor:sr,reset:function(){this.s=this.t=0},add:function(t){lr(Id,t,this.t),lr(this,Id.s,this.s),this.s?this.t+=Id.t:this.s=Id.t},valueOf:function(){return this.s}};var ld,hd,dd,pd,vd,gd,yd,_d,bd,md,xd,wd,Md,Ad,Td,Nd,Sd,Ed,kd,Cd,Pd,zd,Rd,Ld,Dd,Ud,qd,Od,Yd,Bd,Fd,Id=new sr,jd=1e-6,Hd=1e-12,Xd=Math.PI,Gd=Xd/2,Vd=Xd/4,$d=2*Xd,Wd=180/Xd,Zd=Xd/180,Qd=Math.abs,Jd=Math.atan,Kd=Math.atan2,tp=Math.cos,np=Math.ceil,ep=Math.exp,rp=Math.log,ip=Math.pow,op=Math.sin,ap=Math.sign||function(t){return t>0?1:t<0?-1:0},up=Math.sqrt,fp=Math.tan,cp={Feature:function(t,n){gr(t.geometry,n)},FeatureCollection:function(t,n){for(var e=t.features,r=-1,i=e.length;++rjd?bd=90:pp<-jd&&(yd=-90),Td[0]=gd,Td[1]=_d}},gp={sphere:vr,point:Fr,lineStart:jr,lineEnd:Gr,polygonStart:function(){gp.lineStart=Vr,gp.lineEnd=$r},polygonEnd:function(){gp.lineStart=jr,gp.lineEnd=Gr}};Kr.invert=Kr;var yp,_p,bp,mp,xp,wp,Mp,Ap,Tp,Np,Sp,Ep=cr(),kp=di(function(){return!0},function(t){var n,e=NaN,r=NaN,i=NaN;return{lineStart:function(){t.lineStart(),n=1},point:function(o,a){var u=o>0?Xd:-Xd,f=Qd(o-e);Qd(f-Xd)0?Gd:-Gd),t.point(i,r),t.lineEnd(),t.lineStart(),t.point(u,r),t.point(o,r),n=0):i!==u&&f>=Xd&&(Qd(e-i)jd?Jd((op(n)*(o=tp(r))*op(e)-op(r)*(i=tp(n))*op(t))/(i*o*a)):(n+r)/2}(e,r,o,a),t.point(i,r),t.lineEnd(),t.lineStart(),t.point(u,r),n=0),t.point(e=o,r=a),i=u},lineEnd:function(){t.lineEnd(),e=r=NaN},clean:function(){return 2-n}}},function(t,n,e,r){var i;if(null==t)i=e*Gd,r.point(-Xd,i),r.point(0,i),r.point(Xd,i),r.point(Xd,0),r.point(Xd,-i),r.point(0,-i),r.point(-Xd,-i),r.point(-Xd,0),r.point(-Xd,i);else if(Qd(t[0]-n[0])>jd){var o=t[0]jp&&(jp=t),nHp&&(Hp=n)},lineStart:vr,lineEnd:vr,polygonStart:vr,polygonEnd:vr,result:function(){var t=[[Fp,Ip],[jp,Hp]];return 
jp=Hp=-(Ip=Fp=1/0),t}},Gp=0,Vp=0,$p=0,Wp=0,Zp=0,Qp=0,Jp=0,Kp=0,tv=0,nv={point:qi,lineStart:Oi,lineEnd:Fi,polygonStart:function(){nv.lineStart=Ii,nv.lineEnd=ji},polygonEnd:function(){nv.point=qi,nv.lineStart=Oi,nv.lineEnd=Fi},result:function(){var t=tv?[Jp/tv,Kp/tv]:Qp?[Wp/Qp,Zp/Qp]:$p?[Gp/$p,Vp/$p]:[NaN,NaN];return Gp=Vp=$p=Wp=Zp=Qp=Jp=Kp=tv=0,t}};Gi.prototype={_radius:4.5,pointRadius:function(t){return this._radius=t,this},polygonStart:function(){this._line=0},polygonEnd:function(){this._line=NaN},lineStart:function(){this._point=0},lineEnd:function(){0===this._line&&this._context.closePath(),this._point=NaN},point:function(t,n){switch(this._point){case 0:this._context.moveTo(t,n),this._point=1;break;case 1:this._context.lineTo(t,n);break;default:this._context.moveTo(t+this._radius,n),this._context.arc(t,n,this._radius,0,$d)}},result:vr};var ev,rv,iv,ov,av,uv=cr(),fv={point:vr,lineStart:function(){fv.point=Vi},lineEnd:function(){ev&&$i(rv,iv),fv.point=vr},polygonStart:function(){ev=!0},polygonEnd:function(){ev=null},result:function(){var t=+uv;return uv.reset(),t}};Wi.prototype={_radius:4.5,_circle:Zi(4.5),pointRadius:function(t){return(t=+t)!==this._radius&&(this._radius=t,this._circle=null),this},polygonStart:function(){this._line=0},polygonEnd:function(){this._line=NaN},lineStart:function(){this._point=0},lineEnd:function(){0===this._line&&this._string.push("Z"),this._point=NaN},point:function(t,n){switch(this._point){case 0:this._string.push("M",t,",",n),this._point=1;break;case 1:this._string.push("L",t,",",n);break;default:null==this._circle&&(this._circle=Zi(this._radius)),this._string.push("M",t,",",n,this._circle)}},result:function(){if(this._string.length){var t=this._string.join("");return this._string=[],t}return 
null}},Ji.prototype={constructor:Ji,point:function(t,n){this.stream.point(t,n)},sphere:function(){this.stream.sphere()},lineStart:function(){this.stream.lineStart()},lineEnd:function(){this.stream.lineEnd()},polygonStart:function(){this.stream.polygonStart()},polygonEnd:function(){this.stream.polygonEnd()}};var cv=16,sv=tp(30*Zd),lv=Qi({point:function(t,n){this.stream.point(t*Zd,n*Zd)}}),hv=ho(function(t){return up(2/(1+t))});hv.invert=po(function(t){return 2*dr(t/2)});var dv=ho(function(t){return(t=hr(t))&&t/op(t)});dv.invert=po(function(t){return t}),vo.invert=function(t,n){return[t,2*Jd(ep(n))-Gd]},bo.invert=bo,xo.invert=po(Jd),Mo.invert=function(t,n){var e,r=n,i=25;do{var o=r*r,a=o*o;r-=e=(r*(1.007226+o*(.015085+a*(.028874*o-.044475-.005916*a)))-n)/(1.007226+o*(.045255+a*(.259866*o-.311325-.005916*11*a)))}while(Qd(e)>jd&&--i>0);return[t/(.8707+(o=r*r)*(o*(o*o*o*(.003971-.001529*o)-.013791)-.131979)),r]},Ao.invert=po(dr),To.invert=po(function(t){return 2*Jd(t)}),No.invert=function(t,n){return[-n,2*Jd(ep(t))-Gd]},Do.prototype=Po.prototype={constructor:Do,count:function(){return this.eachAfter(Co)},each:function(t){var n,e,r,i,o=this,a=[o];do{for(n=a.reverse(),a=[];o=n.pop();)if(t(o),e=o.children)for(r=0,i=e.length;r=0;--e)i.push(n[e]);return this},sum:function(t){return this.eachAfter(function(n){for(var e=+t(n.data)||0,r=n.children,i=r&&r.length;--i>=0;)e+=r[i].value;n.value=e})},sort:function(t){return this.eachBefore(function(n){n.children&&n.children.sort(t)})},path:function(t){for(var n=this,e=function(t,n){if(t===n)return t;var e=t.ancestors(),r=n.ancestors(),i=null;for(t=e.pop(),n=r.pop();t===n;)i=t,t=e.pop(),n=r.pop();return i}(n,t),r=[n];n!==e;)n=n.parent,r.push(n);for(var i=r.length;t!==e;)r.splice(i,0,t),t=t.parent;return r},ancestors:function(){for(var t=this,n=[t];t=t.parent;)n.push(t);return n},descendants:function(){var t=[];return this.each(function(n){t.push(n)}),t},leaves:function(){var t=[];return 
this.eachBefore(function(n){n.children||t.push(n)}),t},links:function(){var t=this,n=[];return t.each(function(e){e!==t&&n.push({source:e.parent,target:e})}),n},copy:function(){return Po(this).eachBefore(Ro)}};var pv=Array.prototype.slice,vv="$",gv={depth:-1},yv={};ca.prototype=Object.create(Do.prototype);var _v=(1+Math.sqrt(5))/2,bv=function t(n){function e(t,e,r,i,o){la(n,t,e,r,i,o)}return e.ratio=function(n){return t((n=+n)>1?n:1)},e}(_v),mv=function t(n){function e(t,e,r,i,o){if((a=t._squarify)&&a.ratio===n)for(var a,u,f,c,s,l=-1,h=a.length,d=t.value;++l1?n:1)},e}(_v),xv=function t(n){function e(t,e){return t=null==t?0:+t,e=null==e?1:+e,1===arguments.length?(e=t,t=0):e-=t,function(){return n()*e+t}}return e.source=t,e}(va),wv=function t(n){function e(t,e){var r,i;return t=null==t?0:+t,e=null==e?1:+e,function(){var o;if(null!=r)o=r,r=null;else do{r=2*n()-1,o=2*n()-1,i=r*r+o*o}while(!i||i>1);return t+e*o*Math.sqrt(-2*Math.log(i)/i)}}return e.source=t,e}(va),Mv=function t(n){function e(){var t=wv.source(n).apply(this,arguments);return function(){return Math.exp(t())}}return e.source=t,e}(va),Av=function t(n){function e(t){return function(){for(var e=0,r=0;r0?t>1?Ia(function(n){n.setTime(Math.floor(n/t)*t)},function(n,e){n.setTime(+n+e*t)},function(n,e){return(e-n)/t}):Lv:null};var Dv=Lv.range,Uv=6e4,qv=6048e5,Ov=Ia(function(t){t.setTime(1e3*Math.floor(t/1e3))},function(t,n){t.setTime(+t+1e3*n)},function(t,n){return(n-t)/1e3},function(t){return t.getUTCSeconds()}),Yv=Ov.range,Bv=Ia(function(t){t.setTime(Math.floor(t/Uv)*Uv)},function(t,n){t.setTime(+t+n*Uv)},function(t,n){return(n-t)/Uv},function(t){return t.getMinutes()}),Fv=Bv.range,Iv=Ia(function(t){var n=t.getTimezoneOffset()*Uv%36e5;n<0&&(n+=36e5),t.setTime(36e5*Math.floor((+t-n)/36e5)+n)},function(t,n){t.setTime(+t+36e5*n)},function(t,n){return(n-t)/36e5},function(t){return 
t.getHours()}),jv=Iv.range,Hv=Ia(function(t){t.setHours(0,0,0,0)},function(t,n){t.setDate(t.getDate()+n)},function(t,n){return(n-t-(n.getTimezoneOffset()-t.getTimezoneOffset())*Uv)/864e5},function(t){return t.getDate()-1}),Xv=Hv.range,Gv=ja(0),Vv=ja(1),$v=ja(2),Wv=ja(3),Zv=ja(4),Qv=ja(5),Jv=ja(6),Kv=Gv.range,tg=Vv.range,ng=$v.range,eg=Wv.range,rg=Zv.range,ig=Qv.range,og=Jv.range,ag=Ia(function(t){t.setDate(1),t.setHours(0,0,0,0)},function(t,n){t.setMonth(t.getMonth()+n)},function(t,n){return n.getMonth()-t.getMonth()+12*(n.getFullYear()-t.getFullYear())},function(t){return t.getMonth()}),ug=ag.range,fg=Ia(function(t){t.setMonth(0,1),t.setHours(0,0,0,0)},function(t,n){t.setFullYear(t.getFullYear()+n)},function(t,n){return n.getFullYear()-t.getFullYear()},function(t){return t.getFullYear()});fg.every=function(t){return isFinite(t=Math.floor(t))&&t>0?Ia(function(n){n.setFullYear(Math.floor(n.getFullYear()/t)*t),n.setMonth(0,1),n.setHours(0,0,0,0)},function(n,e){n.setFullYear(n.getFullYear()+e*t)}):null};var cg=fg.range,sg=Ia(function(t){t.setUTCSeconds(0,0)},function(t,n){t.setTime(+t+n*Uv)},function(t,n){return(n-t)/Uv},function(t){return t.getUTCMinutes()}),lg=sg.range,hg=Ia(function(t){t.setUTCMinutes(0,0,0)},function(t,n){t.setTime(+t+36e5*n)},function(t,n){return(n-t)/36e5},function(t){return t.getUTCHours()}),dg=hg.range,pg=Ia(function(t){t.setUTCHours(0,0,0,0)},function(t,n){t.setUTCDate(t.getUTCDate()+n)},function(t,n){return(n-t)/864e5},function(t){return t.getUTCDate()-1}),vg=pg.range,gg=Ha(0),yg=Ha(1),_g=Ha(2),bg=Ha(3),mg=Ha(4),xg=Ha(5),wg=Ha(6),Mg=gg.range,Ag=yg.range,Tg=_g.range,Ng=bg.range,Sg=mg.range,Eg=xg.range,kg=wg.range,Cg=Ia(function(t){t.setUTCDate(1),t.setUTCHours(0,0,0,0)},function(t,n){t.setUTCMonth(t.getUTCMonth()+n)},function(t,n){return n.getUTCMonth()-t.getUTCMonth()+12*(n.getUTCFullYear()-t.getUTCFullYear())},function(t){return 
t.getUTCMonth()}),Pg=Cg.range,zg=Ia(function(t){t.setUTCMonth(0,1),t.setUTCHours(0,0,0,0)},function(t,n){t.setUTCFullYear(t.getUTCFullYear()+n)},function(t,n){return n.getUTCFullYear()-t.getUTCFullYear()},function(t){return t.getUTCFullYear()});zg.every=function(t){return isFinite(t=Math.floor(t))&&t>0?Ia(function(n){n.setUTCFullYear(Math.floor(n.getUTCFullYear()/t)*t),n.setUTCMonth(0,1),n.setUTCHours(0,0,0,0)},function(n,e){n.setUTCFullYear(n.getUTCFullYear()+e*t)}):null};var Rg,Lg=zg.range,Dg={"-":"",_:" ",0:"0"},Ug=/^\s*\d+/,qg=/^%/,Og=/[\\^$*+?|[\]().{}]/g;nf({dateTime:"%x, %X",date:"%-m/%-d/%Y",time:"%-I:%M:%S %p",periods:["AM","PM"],days:["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"],shortDays:["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],months:["January","February","March","April","May","June","July","August","September","October","November","December"],shortMonths:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"]});var Yg="%Y-%m-%dT%H:%M:%S.%LZ",Bg=Date.prototype.toISOString?function(t){return t.toISOString()}:t.utcFormat(Yg),Fg=+new Date("2000-01-01T00:00:00.000Z")?function(t){var n=new Date(t);return isNaN(n)?null:n}:t.utcParse(Yg),Ig=1e3,jg=60*Ig,Hg=60*jg,Xg=24*Hg,Gg=7*Xg,Vg=30*Xg,$g=365*Xg,Wg=uf("1f77b4ff7f0e2ca02cd627289467bd8c564be377c27f7f7fbcbd2217becf"),Zg=uf("7fc97fbeaed4fdc086ffff99386cb0f0027fbf5b17666666"),Qg=uf("1b9e77d95f027570b3e7298a66a61ee6ab02a6761d666666"),Jg=uf("a6cee31f78b4b2df8a33a02cfb9a99e31a1cfdbf6fff7f00cab2d66a3d9affff99b15928"),Kg=uf("fbb4aeb3cde3ccebc5decbe4fed9a6ffffcce5d8bdfddaecf2f2f2"),ty=uf("b3e2cdfdcdaccbd5e8f4cae4e6f5c9fff2aef1e2cccccccc"),ny=uf("e41a1c377eb84daf4a984ea3ff7f00ffff33a65628f781bf999999"),ey=uf("66c2a5fc8d628da0cbe78ac3a6d854ffd92fe5c494b3b3b3"),ry=uf("8dd3c7ffffb3bebadafb807280b1d3fdb462b3de69fccde5d9d9d9bc80bdccebc5ffed6f"),iy=new 
Array(3).concat("d8b365f5f5f55ab4ac","a6611adfc27d80cdc1018571","a6611adfc27df5f5f580cdc1018571","8c510ad8b365f6e8c3c7eae55ab4ac01665e","8c510ad8b365f6e8c3f5f5f5c7eae55ab4ac01665e","8c510abf812ddfc27df6e8c3c7eae580cdc135978f01665e","8c510abf812ddfc27df6e8c3f5f5f5c7eae580cdc135978f01665e","5430058c510abf812ddfc27df6e8c3c7eae580cdc135978f01665e003c30","5430058c510abf812ddfc27df6e8c3f5f5f5c7eae580cdc135978f01665e003c30").map(uf),oy=ff(iy),ay=new Array(3).concat("af8dc3f7f7f77fbf7b","7b3294c2a5cfa6dba0008837","7b3294c2a5cff7f7f7a6dba0008837","762a83af8dc3e7d4e8d9f0d37fbf7b1b7837","762a83af8dc3e7d4e8f7f7f7d9f0d37fbf7b1b7837","762a839970abc2a5cfe7d4e8d9f0d3a6dba05aae611b7837","762a839970abc2a5cfe7d4e8f7f7f7d9f0d3a6dba05aae611b7837","40004b762a839970abc2a5cfe7d4e8d9f0d3a6dba05aae611b783700441b","40004b762a839970abc2a5cfe7d4e8f7f7f7d9f0d3a6dba05aae611b783700441b").map(uf),uy=ff(ay),fy=new Array(3).concat("e9a3c9f7f7f7a1d76a","d01c8bf1b6dab8e1864dac26","d01c8bf1b6daf7f7f7b8e1864dac26","c51b7de9a3c9fde0efe6f5d0a1d76a4d9221","c51b7de9a3c9fde0eff7f7f7e6f5d0a1d76a4d9221","c51b7dde77aef1b6dafde0efe6f5d0b8e1867fbc414d9221","c51b7dde77aef1b6dafde0eff7f7f7e6f5d0b8e1867fbc414d9221","8e0152c51b7dde77aef1b6dafde0efe6f5d0b8e1867fbc414d9221276419","8e0152c51b7dde77aef1b6dafde0eff7f7f7e6f5d0b8e1867fbc414d9221276419").map(uf),cy=ff(fy),sy=new Array(3).concat("998ec3f7f7f7f1a340","5e3c99b2abd2fdb863e66101","5e3c99b2abd2f7f7f7fdb863e66101","542788998ec3d8daebfee0b6f1a340b35806","542788998ec3d8daebf7f7f7fee0b6f1a340b35806","5427888073acb2abd2d8daebfee0b6fdb863e08214b35806","5427888073acb2abd2d8daebf7f7f7fee0b6fdb863e08214b35806","2d004b5427888073acb2abd2d8daebfee0b6fdb863e08214b358067f3b08","2d004b5427888073acb2abd2d8daebf7f7f7fee0b6fdb863e08214b358067f3b08").map(uf),ly=ff(sy),hy=new 
Array(3).concat("ef8a62f7f7f767a9cf","ca0020f4a58292c5de0571b0","ca0020f4a582f7f7f792c5de0571b0","b2182bef8a62fddbc7d1e5f067a9cf2166ac","b2182bef8a62fddbc7f7f7f7d1e5f067a9cf2166ac","b2182bd6604df4a582fddbc7d1e5f092c5de4393c32166ac","b2182bd6604df4a582fddbc7f7f7f7d1e5f092c5de4393c32166ac","67001fb2182bd6604df4a582fddbc7d1e5f092c5de4393c32166ac053061","67001fb2182bd6604df4a582fddbc7f7f7f7d1e5f092c5de4393c32166ac053061").map(uf),dy=ff(hy),py=new Array(3).concat("ef8a62ffffff999999","ca0020f4a582bababa404040","ca0020f4a582ffffffbababa404040","b2182bef8a62fddbc7e0e0e09999994d4d4d","b2182bef8a62fddbc7ffffffe0e0e09999994d4d4d","b2182bd6604df4a582fddbc7e0e0e0bababa8787874d4d4d","b2182bd6604df4a582fddbc7ffffffe0e0e0bababa8787874d4d4d","67001fb2182bd6604df4a582fddbc7e0e0e0bababa8787874d4d4d1a1a1a","67001fb2182bd6604df4a582fddbc7ffffffe0e0e0bababa8787874d4d4d1a1a1a").map(uf),vy=ff(py),gy=new Array(3).concat("fc8d59ffffbf91bfdb","d7191cfdae61abd9e92c7bb6","d7191cfdae61ffffbfabd9e92c7bb6","d73027fc8d59fee090e0f3f891bfdb4575b4","d73027fc8d59fee090ffffbfe0f3f891bfdb4575b4","d73027f46d43fdae61fee090e0f3f8abd9e974add14575b4","d73027f46d43fdae61fee090ffffbfe0f3f8abd9e974add14575b4","a50026d73027f46d43fdae61fee090e0f3f8abd9e974add14575b4313695","a50026d73027f46d43fdae61fee090ffffbfe0f3f8abd9e974add14575b4313695").map(uf),yy=ff(gy),_y=new Array(3).concat("fc8d59ffffbf91cf60","d7191cfdae61a6d96a1a9641","d7191cfdae61ffffbfa6d96a1a9641","d73027fc8d59fee08bd9ef8b91cf601a9850","d73027fc8d59fee08bffffbfd9ef8b91cf601a9850","d73027f46d43fdae61fee08bd9ef8ba6d96a66bd631a9850","d73027f46d43fdae61fee08bffffbfd9ef8ba6d96a66bd631a9850","a50026d73027f46d43fdae61fee08bd9ef8ba6d96a66bd631a9850006837","a50026d73027f46d43fdae61fee08bffffbfd9ef8ba6d96a66bd631a9850006837").map(uf),by=ff(_y),my=new 
Array(3).concat("fc8d59ffffbf99d594","d7191cfdae61abdda42b83ba","d7191cfdae61ffffbfabdda42b83ba","d53e4ffc8d59fee08be6f59899d5943288bd","d53e4ffc8d59fee08bffffbfe6f59899d5943288bd","d53e4ff46d43fdae61fee08be6f598abdda466c2a53288bd","d53e4ff46d43fdae61fee08bffffbfe6f598abdda466c2a53288bd","9e0142d53e4ff46d43fdae61fee08be6f598abdda466c2a53288bd5e4fa2","9e0142d53e4ff46d43fdae61fee08bffffbfe6f598abdda466c2a53288bd5e4fa2").map(uf),xy=ff(my),wy=new Array(3).concat("e5f5f999d8c92ca25f","edf8fbb2e2e266c2a4238b45","edf8fbb2e2e266c2a42ca25f006d2c","edf8fbccece699d8c966c2a42ca25f006d2c","edf8fbccece699d8c966c2a441ae76238b45005824","f7fcfde5f5f9ccece699d8c966c2a441ae76238b45005824","f7fcfde5f5f9ccece699d8c966c2a441ae76238b45006d2c00441b").map(uf),My=ff(wy),Ay=new Array(3).concat("e0ecf49ebcda8856a7","edf8fbb3cde38c96c688419d","edf8fbb3cde38c96c68856a7810f7c","edf8fbbfd3e69ebcda8c96c68856a7810f7c","edf8fbbfd3e69ebcda8c96c68c6bb188419d6e016b","f7fcfde0ecf4bfd3e69ebcda8c96c68c6bb188419d6e016b","f7fcfde0ecf4bfd3e69ebcda8c96c68c6bb188419d810f7c4d004b").map(uf),Ty=ff(Ay),Ny=new Array(3).concat("e0f3dba8ddb543a2ca","f0f9e8bae4bc7bccc42b8cbe","f0f9e8bae4bc7bccc443a2ca0868ac","f0f9e8ccebc5a8ddb57bccc443a2ca0868ac","f0f9e8ccebc5a8ddb57bccc44eb3d32b8cbe08589e","f7fcf0e0f3dbccebc5a8ddb57bccc44eb3d32b8cbe08589e","f7fcf0e0f3dbccebc5a8ddb57bccc44eb3d32b8cbe0868ac084081").map(uf),Sy=ff(Ny),Ey=new Array(3).concat("fee8c8fdbb84e34a33","fef0d9fdcc8afc8d59d7301f","fef0d9fdcc8afc8d59e34a33b30000","fef0d9fdd49efdbb84fc8d59e34a33b30000","fef0d9fdd49efdbb84fc8d59ef6548d7301f990000","fff7ecfee8c8fdd49efdbb84fc8d59ef6548d7301f990000","fff7ecfee8c8fdd49efdbb84fc8d59ef6548d7301fb300007f0000").map(uf),ky=ff(Ey),Cy=new 
Array(3).concat("ece2f0a6bddb1c9099","f6eff7bdc9e167a9cf02818a","f6eff7bdc9e167a9cf1c9099016c59","f6eff7d0d1e6a6bddb67a9cf1c9099016c59","f6eff7d0d1e6a6bddb67a9cf3690c002818a016450","fff7fbece2f0d0d1e6a6bddb67a9cf3690c002818a016450","fff7fbece2f0d0d1e6a6bddb67a9cf3690c002818a016c59014636").map(uf),Py=ff(Cy),zy=new Array(3).concat("ece7f2a6bddb2b8cbe","f1eef6bdc9e174a9cf0570b0","f1eef6bdc9e174a9cf2b8cbe045a8d","f1eef6d0d1e6a6bddb74a9cf2b8cbe045a8d","f1eef6d0d1e6a6bddb74a9cf3690c00570b0034e7b","fff7fbece7f2d0d1e6a6bddb74a9cf3690c00570b0034e7b","fff7fbece7f2d0d1e6a6bddb74a9cf3690c00570b0045a8d023858").map(uf),Ry=ff(zy),Ly=new Array(3).concat("e7e1efc994c7dd1c77","f1eef6d7b5d8df65b0ce1256","f1eef6d7b5d8df65b0dd1c77980043","f1eef6d4b9dac994c7df65b0dd1c77980043","f1eef6d4b9dac994c7df65b0e7298ace125691003f","f7f4f9e7e1efd4b9dac994c7df65b0e7298ace125691003f","f7f4f9e7e1efd4b9dac994c7df65b0e7298ace125698004367001f").map(uf),Dy=ff(Ly),Uy=new Array(3).concat("fde0ddfa9fb5c51b8a","feebe2fbb4b9f768a1ae017e","feebe2fbb4b9f768a1c51b8a7a0177","feebe2fcc5c0fa9fb5f768a1c51b8a7a0177","feebe2fcc5c0fa9fb5f768a1dd3497ae017e7a0177","fff7f3fde0ddfcc5c0fa9fb5f768a1dd3497ae017e7a0177","fff7f3fde0ddfcc5c0fa9fb5f768a1dd3497ae017e7a017749006a").map(uf),qy=ff(Uy),Oy=new Array(3).concat("edf8b17fcdbb2c7fb8","ffffcca1dab441b6c4225ea8","ffffcca1dab441b6c42c7fb8253494","ffffccc7e9b47fcdbb41b6c42c7fb8253494","ffffccc7e9b47fcdbb41b6c41d91c0225ea80c2c84","ffffd9edf8b1c7e9b47fcdbb41b6c41d91c0225ea80c2c84","ffffd9edf8b1c7e9b47fcdbb41b6c41d91c0225ea8253494081d58").map(uf),Yy=ff(Oy),By=new Array(3).concat("f7fcb9addd8e31a354","ffffccc2e69978c679238443","ffffccc2e69978c67931a354006837","ffffccd9f0a3addd8e78c67931a354006837","ffffccd9f0a3addd8e78c67941ab5d238443005a32","ffffe5f7fcb9d9f0a3addd8e78c67941ab5d238443005a32","ffffe5f7fcb9d9f0a3addd8e78c67941ab5d238443006837004529").map(uf),Fy=ff(By),Iy=new 
Array(3).concat("fff7bcfec44fd95f0e","ffffd4fed98efe9929cc4c02","ffffd4fed98efe9929d95f0e993404","ffffd4fee391fec44ffe9929d95f0e993404","ffffd4fee391fec44ffe9929ec7014cc4c028c2d04","ffffe5fff7bcfee391fec44ffe9929ec7014cc4c028c2d04","ffffe5fff7bcfee391fec44ffe9929ec7014cc4c02993404662506").map(uf),jy=ff(Iy),Hy=new Array(3).concat("ffeda0feb24cf03b20","ffffb2fecc5cfd8d3ce31a1c","ffffb2fecc5cfd8d3cf03b20bd0026","ffffb2fed976feb24cfd8d3cf03b20bd0026","ffffb2fed976feb24cfd8d3cfc4e2ae31a1cb10026","ffffccffeda0fed976feb24cfd8d3cfc4e2ae31a1cb10026","ffffccffeda0fed976feb24cfd8d3cfc4e2ae31a1cbd0026800026").map(uf),Xy=ff(Hy),Gy=new Array(3).concat("deebf79ecae13182bd","eff3ffbdd7e76baed62171b5","eff3ffbdd7e76baed63182bd08519c","eff3ffc6dbef9ecae16baed63182bd08519c","eff3ffc6dbef9ecae16baed64292c62171b5084594","f7fbffdeebf7c6dbef9ecae16baed64292c62171b5084594","f7fbffdeebf7c6dbef9ecae16baed64292c62171b508519c08306b").map(uf),Vy=ff(Gy),$y=new Array(3).concat("e5f5e0a1d99b31a354","edf8e9bae4b374c476238b45","edf8e9bae4b374c47631a354006d2c","edf8e9c7e9c0a1d99b74c47631a354006d2c","edf8e9c7e9c0a1d99b74c47641ab5d238b45005a32","f7fcf5e5f5e0c7e9c0a1d99b74c47641ab5d238b45005a32","f7fcf5e5f5e0c7e9c0a1d99b74c47641ab5d238b45006d2c00441b").map(uf),Wy=ff($y),Zy=new Array(3).concat("f0f0f0bdbdbd636363","f7f7f7cccccc969696525252","f7f7f7cccccc969696636363252525","f7f7f7d9d9d9bdbdbd969696636363252525","f7f7f7d9d9d9bdbdbd969696737373525252252525","fffffff0f0f0d9d9d9bdbdbd969696737373525252252525","fffffff0f0f0d9d9d9bdbdbd969696737373525252252525000000").map(uf),Qy=ff(Zy),Jy=new Array(3).concat("efedf5bcbddc756bb1","f2f0f7cbc9e29e9ac86a51a3","f2f0f7cbc9e29e9ac8756bb154278f","f2f0f7dadaebbcbddc9e9ac8756bb154278f","f2f0f7dadaebbcbddc9e9ac8807dba6a51a34a1486","fcfbfdefedf5dadaebbcbddc9e9ac8807dba6a51a34a1486","fcfbfdefedf5dadaebbcbddc9e9ac8807dba6a51a354278f3f007d").map(uf),Ky=ff(Jy),t_=new 
Array(3).concat("fee0d2fc9272de2d26","fee5d9fcae91fb6a4acb181d","fee5d9fcae91fb6a4ade2d26a50f15","fee5d9fcbba1fc9272fb6a4ade2d26a50f15","fee5d9fcbba1fc9272fb6a4aef3b2ccb181d99000d","fff5f0fee0d2fcbba1fc9272fb6a4aef3b2ccb181d99000d","fff5f0fee0d2fcbba1fc9272fb6a4aef3b2ccb181da50f1567000d").map(uf),n_=ff(t_),e_=new Array(3).concat("fee6cefdae6be6550d","feeddefdbe85fd8d3cd94701","feeddefdbe85fd8d3ce6550da63603","feeddefdd0a2fdae6bfd8d3ce6550da63603","feeddefdd0a2fdae6bfd8d3cf16913d948018c2d04","fff5ebfee6cefdd0a2fdae6bfd8d3cf16913d948018c2d04","fff5ebfee6cefdd0a2fdae6bfd8d3cf16913d94801a636037f2704").map(uf),r_=ff(e_),i_=ml(Zt(300,.5,0),Zt(-240,.5,1)),o_=ml(Zt(-100,.75,.35),Zt(80,1.5,.8)),a_=ml(Zt(260,.75,.35),Zt(80,1.5,.8)),u_=Zt(),f_=Rt(),c_=Math.PI/3,s_=2*Math.PI/3,l_=cf(uf("44015444025645045745055946075a46085c460a5d460b5e470d60470e6147106347116447136548146748166848176948186a481a6c481b6d481c6e481d6f481f70482071482173482374482475482576482677482878482979472a7a472c7a472d7b472e7c472f7d46307e46327e46337f463480453581453781453882443983443a83443b84433d84433e85423f854240864241864142874144874045884046883f47883f48893e49893e4a893e4c8a3d4d8a3d4e8a3c4f8a3c508b3b518b3b528b3a538b3a548c39558c39568c38588c38598c375a8c375b8d365c8d365d8d355e8d355f8d34608d34618d33628d33638d32648e32658e31668e31678e31688e30698e306a8e2f6b8e2f6c8e2e6d8e2e6e8e2e6f8e2d708e2d718e2c718e2c728e2c738e2b748e2b758e2a768e2a778e2a788e29798e297a8e297b8e287c8e287d8e277e8e277f8e27808e26818e26828e26828e25838e25848e25858e24868e24878e23888e23898e238a8d228b8d228c8d228d8d218e8d218f8d21908d21918c20928c20928c20938c1f948c1f958b1f968b1f978b1f988b1f998a1f9a8a1e9b8a1e9c891e9d891f9e891f9f881fa0881fa1881fa1871fa28720a38620a48621a58521a68522a78522a88423a98324aa8325ab8225ac8226ad8127ad8128ae8029af7f2ab07f2cb17e2db27d2eb37c2fb47c31b57b32b67a34b67935b77937b87838b9773aba763bbb753dbc743fbc7340bd7242be7144bf7046c06f48c16e4ac16d4cc26c4ec36b50c46a52c56954c56856c66758c7655ac8645cc8635ec96260ca6063cb5f65cb5e67cc5c69cd5b6ccd5a6ece5870cf5773d05675
d05477d1537ad1517cd2507fd34e81d34d84d44b86d54989d5488bd6468ed64590d74393d74195d84098d83e9bd93c9dd93ba0da39a2da37a5db36a8db34aadc32addc30b0dd2fb2dd2db5de2bb8de29bade28bddf26c0df25c2df23c5e021c8e020cae11fcde11dd0e11cd2e21bd5e21ad8e219dae319dde318dfe318e2e418e5e419e7e419eae51aece51befe51cf1e51df4e61ef6e620f8e621fbe723fde725")),h_=cf(uf("00000401000501010601010802010902020b02020d03030f03031204041405041606051806051a07061c08071e0907200a08220b09240c09260d0a290e0b2b100b2d110c2f120d31130d34140e36150e38160f3b180f3d19103f1a10421c10441d11471e114920114b21114e22115024125325125527125829115a2a115c2c115f2d11612f116331116533106734106936106b38106c390f6e3b0f703d0f713f0f72400f74420f75440f764510774710784910784a10794c117a4e117b4f127b51127c52137c54137d56147d57157e59157e5a167e5c167f5d177f5f187f601880621980641a80651a80671b80681c816a1c816b1d816d1d816e1e81701f81721f817320817521817621817822817922827b23827c23827e24828025828125818326818426818627818827818928818b29818c29818e2a81902a81912b81932b80942c80962c80982d80992d809b2e7f9c2e7f9e2f7fa02f7fa1307ea3307ea5317ea6317da8327daa337dab337cad347cae347bb0357bb2357bb3367ab5367ab73779b83779ba3878bc3978bd3977bf3a77c03a76c23b75c43c75c53c74c73d73c83e73ca3e72cc3f71cd4071cf4070d0416fd2426fd3436ed5446dd6456cd8456cd9466bdb476adc4869de4968df4a68e04c67e24d66e34e65e44f64e55064e75263e85362e95462ea5661eb5760ec5860ed5a5fee5b5eef5d5ef05f5ef1605df2625df2645cf3655cf4675cf4695cf56b5cf66c5cf66e5cf7705cf7725cf8745cf8765cf9785df9795df97b5dfa7d5efa7f5efa815ffb835ffb8560fb8761fc8961fc8a62fc8c63fc8e64fc9065fd9266fd9467fd9668fd9869fd9a6afd9b6bfe9d6cfe9f6dfea16efea36ffea571fea772fea973feaa74feac76feae77feb078feb27afeb47bfeb67cfeb77efeb97ffebb81febd82febf84fec185fec287fec488fec68afec88cfeca8dfecc8ffecd90fecf92fed194fed395fed597fed799fed89afdda9cfddc9efddea0fde0a1fde2a3fde3a5fde5a7fde7a9fde9aafdebacfcecaefceeb0fcf0b2fcf2b4fcf4b6fcf6b8fcf7b9fcf9bbfcfbbdfcfdbf")),d_=cf(uf("00000401000501010601010802010a02020c02020e03021004031204031405041706041907051b08051d09061f0a07220b07240c08260d0829
0e092b10092d110a30120a32140b34150b37160b39180c3c190c3e1b0c411c0c431e0c451f0c48210c4a230c4c240c4f260c51280b53290b552b0b572d0b592f0a5b310a5c320a5e340a5f3609613809623909633b09643d09653e0966400a67420a68440a68450a69470b6a490b6a4a0c6b4c0c6b4d0d6c4f0d6c510e6c520e6d540f6d550f6d57106e59106e5a116e5c126e5d126e5f136e61136e62146e64156e65156e67166e69166e6a176e6c186e6d186e6f196e71196e721a6e741a6e751b6e771c6d781c6d7a1d6d7c1d6d7d1e6d7f1e6c801f6c82206c84206b85216b87216b88226a8a226a8c23698d23698f24699025689225689326679526679727669827669a28659b29649d29649f2a63a02a63a22b62a32c61a52c60a62d60a82e5fa92e5eab2f5ead305dae305cb0315bb1325ab3325ab43359b63458b73557b93556ba3655bc3754bd3853bf3952c03a51c13a50c33b4fc43c4ec63d4dc73e4cc83f4bca404acb4149cc4248ce4347cf4446d04545d24644d34743d44842d54a41d74b3fd84c3ed94d3dda4e3cdb503bdd513ade5238df5337e05536e15635e25734e35933e45a31e55c30e65d2fe75e2ee8602de9612bea632aeb6429eb6628ec6726ed6925ee6a24ef6c23ef6e21f06f20f1711ff1731df2741cf3761bf37819f47918f57b17f57d15f67e14f68013f78212f78410f8850ff8870ef8890cf98b0bf98c0af98e09fa9008fa9207fa9407fb9606fb9706fb9906fb9b06fb9d07fc9f07fca108fca309fca50afca60cfca80dfcaa0ffcac11fcae12fcb014fcb216fcb418fbb61afbb81dfbba1ffbbc21fbbe23fac026fac228fac42afac62df9c72ff9c932f9cb35f8cd37f8cf3af7d13df7d340f6d543f6d746f5d949f5db4cf4dd4ff4df53f4e156f3e35af3e55df2e661f2e865f2ea69f1ec6df1ed71f1ef75f1f179f2f27df2f482f3f586f3f68af4f88ef5f992f6fa96f8fb9af9fc9dfafda1fcffa4")),p_=cf(uf("0d088710078813078916078a19068c1b068d1d068e20068f2206902406912605912805922a05932c05942e05952f059631059733059735049837049938049a3a049a3c049b3e049c3f049c41049d43039e44039e46039f48039f4903a04b03a14c02a14e02a25002a25102a35302a35502a45601a45801a45901a55b01a55c01a65e01a66001a66100a76300a76400a76600a76700a86900a86a00a86c00a86e00a86f00a87100a87201a87401a87501a87701a87801a87a02a87b02a87d03a87e03a88004a88104a78305a78405a78606a68707a68808a68a09a58b0aa58d0ba58e0ca48f0da4910ea3920fa39410a29511a19613a19814a099159f9a169f9c179e9d189d9e199da01a9ca11b9ba21d9aa31e9aa51f99a62098
a72197a82296aa2395ab2494ac2694ad2793ae2892b02991b12a90b22b8fb32c8eb42e8db52f8cb6308bb7318ab83289ba3388bb3488bc3587bd3786be3885bf3984c03a83c13b82c23c81c33d80c43e7fc5407ec6417dc7427cc8437bc9447aca457acb4679cc4778cc4977cd4a76ce4b75cf4c74d04d73d14e72d24f71d35171d45270d5536fd5546ed6556dd7566cd8576bd9586ada5a6ada5b69db5c68dc5d67dd5e66de5f65de6164df6263e06363e16462e26561e26660e3685fe4695ee56a5de56b5de66c5ce76e5be76f5ae87059e97158e97257ea7457eb7556eb7655ec7754ed7953ed7a52ee7b51ef7c51ef7e50f07f4ff0804ef1814df1834cf2844bf3854bf3874af48849f48948f58b47f58c46f68d45f68f44f79044f79143f79342f89441f89540f9973ff9983ef99a3efa9b3dfa9c3cfa9e3bfb9f3afba139fba238fca338fca537fca636fca835fca934fdab33fdac33fdae32fdaf31fdb130fdb22ffdb42ffdb52efeb72dfeb82cfeba2cfebb2bfebd2afebe2afec029fdc229fdc328fdc527fdc627fdc827fdca26fdcb26fccd25fcce25fcd025fcd225fbd324fbd524fbd724fad824fada24f9dc24f9dd25f8df25f8e125f7e225f7e425f6e626f6e826f5e926f5eb27f4ed27f3ee27f3f027f2f227f1f426f1f525f0f724f0f921")),v_=Math.abs,g_=Math.atan2,y_=Math.cos,__=Math.max,b_=Math.min,m_=Math.sin,x_=Math.sqrt,w_=1e-12,M_=Math.PI,A_=M_/2,T_=2*M_;_f.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._point=0},lineEnd:function(){(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2;default:this._context.lineTo(t,n)}}};var N_=Sf(bf);Nf.prototype={areaStart:function(){this._curve.areaStart()},areaEnd:function(){this._curve.areaEnd()},lineStart:function(){this._curve.lineStart()},lineEnd:function(){this._curve.lineEnd()},point:function(t,n){this._curve.point(n*Math.sin(t),n*-Math.cos(t))}};var S_=Array.prototype.slice,E_={draw:function(t,n){var e=Math.sqrt(n/M_);t.moveTo(e,0),t.arc(0,0,e,0,T_)}},k_={draw:function(t,n){var 
e=Math.sqrt(n/5)/2;t.moveTo(-3*e,-e),t.lineTo(-e,-e),t.lineTo(-e,-3*e),t.lineTo(e,-3*e),t.lineTo(e,-e),t.lineTo(3*e,-e),t.lineTo(3*e,e),t.lineTo(e,e),t.lineTo(e,3*e),t.lineTo(-e,3*e),t.lineTo(-e,e),t.lineTo(-3*e,e),t.closePath()}},C_=Math.sqrt(1/3),P_=2*C_,z_={draw:function(t,n){var e=Math.sqrt(n/P_),r=e*C_;t.moveTo(0,-e),t.lineTo(r,0),t.lineTo(0,e),t.lineTo(-r,0),t.closePath()}},R_=Math.sin(M_/10)/Math.sin(7*M_/10),L_=Math.sin(T_/10)*R_,D_=-Math.cos(T_/10)*R_,U_={draw:function(t,n){var e=Math.sqrt(.8908130915292852*n),r=L_*e,i=D_*e;t.moveTo(0,-e),t.lineTo(r,i);for(var o=1;o<5;++o){var a=T_*o/5,u=Math.cos(a),f=Math.sin(a);t.lineTo(f*e,-u*e),t.lineTo(u*r-f*i,f*r+u*i)}t.closePath()}},q_={draw:function(t,n){var e=Math.sqrt(n),r=-e/2;t.rect(r,r,e,e)}},O_=Math.sqrt(3),Y_={draw:function(t,n){var e=-Math.sqrt(n/(3*O_));t.moveTo(0,2*e),t.lineTo(-O_*e,-e),t.lineTo(O_*e,-e),t.closePath()}},B_=Math.sqrt(3)/2,F_=1/Math.sqrt(12),I_=3*(F_/2+1),j_={draw:function(t,n){var e=Math.sqrt(n/I_),r=e/2,i=e*F_,o=r,a=e*F_+e,u=-o,f=a;t.moveTo(r,i),t.lineTo(o,a),t.lineTo(u,f),t.lineTo(-.5*r-B_*i,B_*r+-.5*i),t.lineTo(-.5*o-B_*a,B_*o+-.5*a),t.lineTo(-.5*u-B_*f,B_*u+-.5*f),t.lineTo(-.5*r+B_*i,-.5*i-B_*r),t.lineTo(-.5*o+B_*a,-.5*a-B_*o),t.lineTo(-.5*u+B_*f,-.5*f-B_*u),t.closePath()}},H_=[E_,k_,z_,q_,U_,Y_,j_];Bf.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._y0=this._y1=NaN,this._point=0},lineEnd:function(){switch(this._point){case 3:Yf(this,this._x1,this._y1);case 2:this._context.lineTo(this._x1,this._y1)}(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2;break;case 
2:this._point=3,this._context.lineTo((5*this._x0+this._x1)/6,(5*this._y0+this._y1)/6);default:Yf(this,t,n)}this._x0=this._x1,this._x1=t,this._y0=this._y1,this._y1=n}},Ff.prototype={areaStart:Of,areaEnd:Of,lineStart:function(){this._x0=this._x1=this._x2=this._x3=this._x4=this._y0=this._y1=this._y2=this._y3=this._y4=NaN,this._point=0},lineEnd:function(){switch(this._point){case 1:this._context.moveTo(this._x2,this._y2),this._context.closePath();break;case 2:this._context.moveTo((this._x2+2*this._x3)/3,(this._y2+2*this._y3)/3),this._context.lineTo((this._x3+2*this._x2)/3,(this._y3+2*this._y2)/3),this._context.closePath();break;case 3:this.point(this._x2,this._y2),this.point(this._x3,this._y3),this.point(this._x4,this._y4)}},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._x2=t,this._y2=n;break;case 1:this._point=2,this._x3=t,this._y3=n;break;case 2:this._point=3,this._x4=t,this._y4=n,this._context.moveTo((this._x0+4*this._x1+t)/6,(this._y0+4*this._y1+n)/6);break;default:Yf(this,t,n)}this._x0=this._x1,this._x1=t,this._y0=this._y1,this._y1=n}},If.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._y0=this._y1=NaN,this._point=0},lineEnd:function(){(this._line||0!==this._line&&3===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1;break;case 1:this._point=2;break;case 2:this._point=3;var e=(this._x0+4*this._x1+t)/6,r=(this._y0+4*this._y1+n)/6;this._line?this._context.lineTo(e,r):this._context.moveTo(e,r);break;case 3:this._point=4;default:Yf(this,t,n)}this._x0=this._x1,this._x1=t,this._y0=this._y1,this._y1=n}},jf.prototype={lineStart:function(){this._x=[],this._y=[],this._basis.lineStart()},lineEnd:function(){var t=this._x,n=this._y,e=t.length-1;if(e>0)for(var 
r,i=t[0],o=n[0],a=t[e]-i,u=n[e]-o,f=-1;++f<=e;)r=f/e,this._basis.point(this._beta*t[f]+(1-this._beta)*(i+r*a),this._beta*n[f]+(1-this._beta)*(o+r*u));this._x=this._y=null,this._basis.lineEnd()},point:function(t,n){this._x.push(+t),this._y.push(+n)}};var X_=function t(n){function e(t){return 1===n?new Bf(t):new jf(t,n)}return e.beta=function(n){return t(+n)},e}(.85);Xf.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._x2=this._y0=this._y1=this._y2=NaN,this._point=0},lineEnd:function(){switch(this._point){case 2:this._context.lineTo(this._x2,this._y2);break;case 3:Hf(this,this._x1,this._y1)}(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2,this._x1=t,this._y1=n;break;case 2:this._point=3;default:Hf(this,t,n)}this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var G_=function t(n){function e(t){return new Xf(t,n)}return e.tension=function(n){return t(+n)},e}(0);Gf.prototype={areaStart:Of,areaEnd:Of,lineStart:function(){this._x0=this._x1=this._x2=this._x3=this._x4=this._x5=this._y0=this._y1=this._y2=this._y3=this._y4=this._y5=NaN,this._point=0},lineEnd:function(){switch(this._point){case 1:this._context.moveTo(this._x3,this._y3),this._context.closePath();break;case 2:this._context.lineTo(this._x3,this._y3),this._context.closePath();break;case 3:this.point(this._x3,this._y3),this.point(this._x4,this._y4),this.point(this._x5,this._y5)}},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._x3=t,this._y3=n;break;case 1:this._point=2,this._context.moveTo(this._x4=t,this._y4=n);break;case 
2:this._point=3,this._x5=t,this._y5=n;break;default:Hf(this,t,n)}this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var V_=function t(n){function e(t){return new Gf(t,n)}return e.tension=function(n){return t(+n)},e}(0);Vf.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._x2=this._y0=this._y1=this._y2=NaN,this._point=0},lineEnd:function(){(this._line||0!==this._line&&3===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1;break;case 1:this._point=2;break;case 2:this._point=3,this._line?this._context.lineTo(this._x2,this._y2):this._context.moveTo(this._x2,this._y2);break;case 3:this._point=4;default:Hf(this,t,n)}this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var $_=function t(n){function e(t){return new Vf(t,n)}return e.tension=function(n){return t(+n)},e}(0);Wf.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._x2=this._y0=this._y1=this._y2=NaN,this._l01_a=this._l12_a=this._l23_a=this._l01_2a=this._l12_2a=this._l23_2a=this._point=0},lineEnd:function(){switch(this._point){case 2:this._context.lineTo(this._x2,this._y2);break;case 3:this.point(this._x2,this._y2)}(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){if(t=+t,n=+n,this._point){var e=this._x2-t,r=this._y2-n;this._l23_a=Math.sqrt(this._l23_2a=Math.pow(e*e+r*r,this._alpha))}switch(this._point){case 0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2;break;case 
2:this._point=3;default:$f(this,t,n)}this._l01_a=this._l12_a,this._l12_a=this._l23_a,this._l01_2a=this._l12_2a,this._l12_2a=this._l23_2a,this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var W_=function t(n){function e(t){return n?new Wf(t,n):new Xf(t,0)}return e.alpha=function(n){return t(+n)},e}(.5);Zf.prototype={areaStart:Of,areaEnd:Of,lineStart:function(){this._x0=this._x1=this._x2=this._x3=this._x4=this._x5=this._y0=this._y1=this._y2=this._y3=this._y4=this._y5=NaN,this._l01_a=this._l12_a=this._l23_a=this._l01_2a=this._l12_2a=this._l23_2a=this._point=0},lineEnd:function(){switch(this._point){case 1:this._context.moveTo(this._x3,this._y3),this._context.closePath();break;case 2:this._context.lineTo(this._x3,this._y3),this._context.closePath();break;case 3:this.point(this._x3,this._y3),this.point(this._x4,this._y4),this.point(this._x5,this._y5)}},point:function(t,n){if(t=+t,n=+n,this._point){var e=this._x2-t,r=this._y2-n;this._l23_a=Math.sqrt(this._l23_2a=Math.pow(e*e+r*r,this._alpha))}switch(this._point){case 0:this._point=1,this._x3=t,this._y3=n;break;case 1:this._point=2,this._context.moveTo(this._x4=t,this._y4=n);break;case 2:this._point=3,this._x5=t,this._y5=n;break;default:$f(this,t,n)}this._l01_a=this._l12_a,this._l12_a=this._l23_a,this._l01_2a=this._l12_2a,this._l12_2a=this._l23_2a,this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var Z_=function t(n){function e(t){return n?new Zf(t,n):new Gf(t,0)}return e.alpha=function(n){return t(+n)},e}(.5);Qf.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._x2=this._y0=this._y1=this._y2=NaN,this._l01_a=this._l12_a=this._l23_a=this._l01_2a=this._l12_2a=this._l23_2a=this._point=0},lineEnd:function(){(this._line||0!==this._line&&3===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){if(t=+t,n=+n,this._point){var 
e=this._x2-t,r=this._y2-n;this._l23_a=Math.sqrt(this._l23_2a=Math.pow(e*e+r*r,this._alpha))}switch(this._point){case 0:this._point=1;break;case 1:this._point=2;break;case 2:this._point=3,this._line?this._context.lineTo(this._x2,this._y2):this._context.moveTo(this._x2,this._y2);break;case 3:this._point=4;default:$f(this,t,n)}this._l01_a=this._l12_a,this._l12_a=this._l23_a,this._l01_2a=this._l12_2a,this._l12_2a=this._l23_2a,this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var Q_=function t(n){function e(t){return n?new Qf(t,n):new Vf(t,0)}return e.alpha=function(n){return t(+n)},e}(.5);Jf.prototype={areaStart:Of,areaEnd:Of,lineStart:function(){this._point=0},lineEnd:function(){this._point&&this._context.closePath()},point:function(t,n){t=+t,n=+n,this._point?this._context.lineTo(t,n):(this._point=1,this._context.moveTo(t,n))}},rc.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._y0=this._y1=this._t0=NaN,this._point=0},lineEnd:function(){switch(this._point){case 2:this._context.lineTo(this._x1,this._y1);break;case 3:ec(this,this._t0,nc(this,this._t0))}(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){var e=NaN;if(t=+t,n=+n,t!==this._x1||n!==this._y1){switch(this._point){case 0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2;break;case 
2:this._point=3,ec(this,nc(this,e=tc(this,t,n)),e);break;default:ec(this,this._t0,e=tc(this,t,n))}this._x0=this._x1,this._x1=t,this._y0=this._y1,this._y1=n,this._t0=e}}},(ic.prototype=Object.create(rc.prototype)).point=function(t,n){rc.prototype.point.call(this,n,t)},oc.prototype={moveTo:function(t,n){this._context.moveTo(n,t)},closePath:function(){this._context.closePath()},lineTo:function(t,n){this._context.lineTo(n,t)},bezierCurveTo:function(t,n,e,r,i,o){this._context.bezierCurveTo(n,t,r,e,o,i)}},ac.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x=[],this._y=[]},lineEnd:function(){var t=this._x,n=this._y,e=t.length;if(e)if(this._line?this._context.lineTo(t[0],n[0]):this._context.moveTo(t[0],n[0]),2===e)this._context.lineTo(t[1],n[1]);else for(var r=uc(t),i=uc(n),o=0,a=1;a=0&&(this._t=1-this._t,this._line=1-this._line)},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2;default:if(this._t<=0)this._context.lineTo(this._x,n),this._context.lineTo(t,n);else{var e=this._x*(1-this._t)+t*this._t;this._context.lineTo(e,this._y),this._context.lineTo(e,n)}}this._x=t,this._y=n}},yc.prototype={constructor:yc,insert:function(t,n){var e,r,i;if(t){if(n.P=t,n.N=t.N,t.N&&(t.N.P=n),t.N=n,t.R){for(t=t.R;t.L;)t=t.L;t.L=n}else t.R=n;e=t}else this._?(t=xc(this._),n.P=null,n.N=t,t.P=t.L=n,e=t):(n.P=n.N=null,this._=n,e=null);for(n.L=n.R=null,n.U=e,n.C=!0,t=n;e&&e.C;)e===(r=e.U).L?(i=r.R)&&i.C?(e.C=i.C=!1,r.C=!0,t=r):(t===e.R&&(bc(this,e),e=(t=e).U),e.C=!1,r.C=!0,mc(this,r)):(i=r.L)&&i.C?(e.C=i.C=!1,r.C=!0,t=r):(t===e.L&&(mc(this,e),e=(t=e).U),e.C=!1,r.C=!0,bc(this,r)),e=t.U;this._.C=!1},remove:function(t){t.N&&(t.N.P=t.P),t.P&&(t.P.N=t.N),t.N=t.P=null;var 
n,e,r,i=t.U,o=t.L,a=t.R;if(e=o?a?xc(a):o:a,i?i.L===t?i.L=e:i.R=e:this._=e,o&&a?(r=e.C,e.C=t.C,e.L=o,o.U=e,e!==a?(i=e.U,e.U=t.U,t=e.R,i.L=t,e.R=a,a.U=e):(e.U=i,i=e,t=e.R)):(r=t.C,t=e),t&&(t.U=i),!r)if(t&&t.C)t.C=!1;else{do{if(t===this._)break;if(t===i.L){if((n=i.R).C&&(n.C=!1,i.C=!0,bc(this,i),n=i.R),n.L&&n.L.C||n.R&&n.R.C){n.R&&n.R.C||(n.L.C=!1,n.C=!0,mc(this,n),n=i.R),n.C=i.C,i.C=n.R.C=!1,bc(this,i),t=this._;break}}else if((n=i.L).C&&(n.C=!1,i.C=!0,mc(this,i),n=i.L),n.L&&n.L.C||n.R&&n.R.C){n.L&&n.L.C||(n.R.C=!1,n.C=!0,bc(this,n),n=i.L),n.C=i.C,i.C=n.L.C=!1,mc(this,i),t=this._;break}n.C=!0,t=i,i=i.U}while(!t.C);t&&(t.C=!1)}}};var J_,K_,tb,nb,eb,rb=[],ib=[],ob=1e-6,ab=1e-12;Yc.prototype={constructor:Yc,polygons:function(){var t=this.edges;return this.cells.map(function(n){var e=n.halfedges.map(function(e){return Ec(n,t[e])});return e.data=n.site.data,e})},triangles:function(){var t=[],n=this.edges;return this.cells.forEach(function(e,r){if(o=(i=e.halfedges).length)for(var i,o,a,u=e.site,f=-1,c=n[i[o-1]],s=c.left===u?c.right:c.left;++f=u)return null;var f=t-i.site[0],c=n-i.site[1],s=f*f+c*c;do{i=o.cells[r=a],a=null,i.halfedges.forEach(function(e){var r=o.edges[e],u=r.left;if(u!==i.site&&u||(u=r.right)){var f=t-u[0],c=n-u[1],l=f*f+c*c;lt?1:n>=t?0:NaN},t.deviation=a,t.extent=u,t.histogram=function(){function t(t){var i,o,a=t.length,u=new Array(a);for(i=0;il;)h.pop(),--p;var v,g=new Array(p+1);for(i=0;i<=p;++i)(v=g[i]=[]).x0=i>0?h[i-1]:c,v.x1=i=o.length)return null!=e&&n.sort(e),null!=r?r(n):n;for(var f,c,s,l=-1,h=n.length,d=o[i++],p=he(),v=a();++lo.length)return t;var i,u=a[e-1];return null!=r&&e>=o.length?i=t.entries():(i=[],t.each(function(t,r){i.push({key:r,values:n(t,e)})})),null!=u?i.sort(function(t,n){return u(t.key,n.key)}):i}var e,r,i,o=[],a=[];return i={object:function(n){return t(n,0,de,pe)},map:function(n){return t(n,0,ve,ge)},entries:function(e){return n(t(e,0,ve,ge),0)},key:function(t){return o.push(t),i},sortKeys:function(t){return 
a[o.length-1]=t,i},sortValues:function(t){return e=t,i},rollup:function(t){return r=t,i}}},t.set=_e,t.map=he,t.keys=function(t){var n=[];for(var e in t)n.push(e);return n},t.values=function(t){var n=[];for(var e in t)n.push(t[e]);return n},t.entries=function(t){var n=[];for(var e in t)n.push({key:e,value:t[e]});return n},t.color=kt,t.rgb=Rt,t.hsl=qt,t.lab=Ft,t.hcl=$t,t.lch=function(t,n,e,r){return 1===arguments.length?Vt(t):new Wt(e,n,t,null==r?1:r)},t.gray=function(t,n){return new It(t,0,0,null==n?1:n)},t.cubehelix=Zt,t.contours=Me,t.contourDensity=function(){function t(t){var e=new Float32Array(v*y),r=new Float32Array(v*y);t.forEach(function(t,n,r){var i=a(t,n,r)+p>>h,o=u(t,n,r)+p>>h;i>=0&&i=0&&o>h),Te({width:v,height:y,data:r},{width:v,height:y,data:e},l>>h),Ae({width:v,height:y,data:e},{width:v,height:y,data:r},l>>h),Te({width:v,height:y,data:r},{width:v,height:y,data:e},l>>h),Ae({width:v,height:y,data:e},{width:v,height:y,data:r},l>>h),Te({width:v,height:y,data:r},{width:v,height:y,data:e},l>>h);var i=_(e);if(!Array.isArray(i)){var o=g(e);i=d(0,o,i),(i=s(0,Math.floor(o/i)*i,i)).shift()}return Me().thresholds(i).size([v,y])(e).map(n)}function n(t){return t.value*=Math.pow(2,-2*h),t.coordinates.forEach(e),t}function e(t){t.forEach(r)}function r(t){t.forEach(i)}function i(t){t[0]=t[0]*Math.pow(2,h)-p,t[1]=t[1]*Math.pow(2,h)-p}function o(){return p=3*l,v=f+2*p>>h,y=c+2*p>>h,t}var a=Ne,u=Se,f=960,c=500,l=20,h=2,p=3*l,v=f+2*p>>h,y=c+2*p>>h,_=me(20);return t.x=function(n){return arguments.length?(a="function"==typeof n?n:me(+n),t):a},t.y=function(n){return arguments.length?(u="function"==typeof n?n:me(+n),t):u},t.size=function(t){if(!arguments.length)return[f,c];var n=Math.ceil(t[0]),e=Math.ceil(t[1]);if(!(n>=0||n>=0))throw new Error("invalid size");return f=n,c=e,o()},t.cellSize=function(t){if(!arguments.length)return 1<=1))throw new Error("invalid cell size");return h=Math.floor(Math.log(t)/Math.LN2),o()},t.thresholds=function(n){return 
arguments.length?(_="function"==typeof n?n:Array.isArray(n)?me(Dh.call(n)):me(n),t):_},t.bandwidth=function(t){if(!arguments.length)return Math.sqrt(l*(l+1));if(!((t=+t)>=0))throw new Error("invalid bandwidth");return l=Math.round((Math.sqrt(4*t*t+1)-1)/2),o()},t},t.dispatch=N,t.drag=function(){function n(t){t.on("mousedown.drag",e).filter(g).on("touchstart.drag",o).on("touchmove.drag",a).on("touchend.drag touchcancel.drag",u).style("touch-action","none").style("-webkit-tap-highlight-color","rgba(0,0,0,0)")}function e(){if(!h&&d.apply(this,arguments)){var n=f("mouse",p.apply(this,arguments),pt,this,arguments);n&&(ct(t.event.view).on("mousemove.drag",r,!0).on("mouseup.drag",i,!0),_t(t.event.view),gt(),l=!1,c=t.event.clientX,s=t.event.clientY,n("start"))}}function r(){if(yt(),!l){var n=t.event.clientX-c,e=t.event.clientY-s;l=n*n+e*e>m}y.mouse("drag")}function i(){ct(t.event.view).on("mousemove.drag mouseup.drag",null),bt(t.event.view,l),yt(),y.mouse("end")}function o(){if(d.apply(this,arguments)){var n,e,r=t.event.changedTouches,i=p.apply(this,arguments),o=r.length;for(n=0;nf+d||ic+d||or.index){var p=f-u.x-u.vx,v=c-u.y-u.vy,g=p*p+v*v;gt.r&&(t.r=t[n].r)}function r(){if(i){var n,e,r=i.length;for(o=new Array(r),n=0;n=s)){(t.data!==o||t.next)&&(0===i&&(i=Oe(),d+=i*i),0===f&&(f=Oe(),d+=f*f),d1?(null==n?l.remove(t):l.set(t,i(n)),o):l.get(t)},find:function(n,e,r){var i,o,a,u,f,c=0,s=t.length;for(null==r?r=1/0:r*=r,c=0;c1?(d.on(t,n),o):d.on(t)}}},t.forceX=function(t){function n(t){for(var n,e=0,a=r.length;eOr(r[0],r[1])&&(r[1]=i[1]),Or(i[0],r[1])>Or(r[0],r[1])&&(r[0]=i[0])):o.push(r=i);for(a=-1/0,n=0,r=o[e=o.length-1];n<=e;r=i,++n)i=o[n],(u=Or(r[1],i[0]))>a&&(a=u,gd=i[0],_d=r[1])}return Ad=Td=null,gd===1/0||yd===1/0?[[NaN,NaN],[NaN,NaN]]:[[gd,yd],[_d,bd]]},t.geoCentroid=function(t){Nd=Sd=Ed=kd=Cd=Pd=zd=Rd=Ld=Dd=Ud=0,br(t,gp);var n=Ld,e=Dd,r=Ud,i=n*n+e*e+r*r;return 
i=.12&&i<.234&&r>=-.425&&r<-.214?c:i>=.166&&i<.234&&r>=-.214&&r<-.115?s:f).invert(t)},t.stream=function(t){return e&&r===t?e:e=function(t){var n=t.length;return{point:function(e,r){for(var i=-1;++i2?t[2]+90:90]):(t=e(),[t[0],t[1],t[2]-90])},e([0,0,90]).scale(159.155)},t.geoTransverseMercatorRaw=No,t.geoRotation=ii,t.geoStream=br,t.geoTransform=function(t){return{stream:Qi(t)}},t.cluster=function(){function t(t){var o,a=0;t.eachAfter(function(t){var e=t.children;e?(t.x=function(t){return t.reduce(Eo,0)/t.length}(e),t.y=function(t){return 1+t.reduce(ko,0)}(e)):(t.x=o?a+=n(t,o):0,t.y=0,o=t)});var u=function(t){for(var n;n=t.children;)t=n[0];return t}(t),f=function(t){for(var n;n=t.children;)t=n[n.length-1];return t}(t),c=u.x-n(u,f)/2,s=f.x+n(f,u)/2;return t.eachAfter(i?function(n){n.x=(n.x-t.x)*e,n.y=(t.y-n.y)*r}:function(n){n.x=(n.x-c)/(s-c)*e,n.y=(1-(t.y?n.y/t.y:1))*r})}var n=So,e=1,r=1,i=!1;return t.separation=function(e){return arguments.length?(n=e,t):n},t.size=function(n){return arguments.length?(i=!1,e=+n[0],r=+n[1],t):i?null:[e,r]},t.nodeSize=function(n){return arguments.length?(i=!0,e=+n[0],r=+n[1],t):i?[e,r]:null},t},t.hierarchy=Po,t.pack=function(){function t(t){return t.x=e/2,t.y=r/2,n?t.eachBefore(Qo(n)).eachAfter(Jo(i,.5)).eachBefore(Ko(1)):t.eachBefore(Qo(Zo)).eachAfter(Jo($o,1)).eachAfter(Jo(i,t.r/Math.min(e,r))).eachBefore(Ko(Math.min(e,r)/(2*t.r))),t}var n=null,e=1,r=1,i=$o;return t.radius=function(e){return arguments.length?(n=function(t){return null==t?null:Vo(t)}(e),t):n},t.size=function(n){return arguments.length?(e=+n[0],r=+n[1],t):[e,r]},t.padding=function(n){return arguments.length?(i="function"==typeof n?n:Wo(+n),t):i},t},t.packSiblings=function(t){return Go(t),t},t.packEnclose=Uo,t.partition=function(){function t(t){var o=t.height+1;return t.x0=t.y0=r,t.x1=n,t.y1=e/o,t.eachBefore(function(t,n){return function(e){e.children&&na(e,e.x0,t*(e.depth+1)/n,e.x1,t*(e.depth+2)/n);var i=e.x0,o=e.y0,a=e.x1-r,u=e.y1-r;a0)throw new Error("cycle");return 
o}var n=ea,e=ra;return t.id=function(e){return arguments.length?(n=Vo(e),t):n},t.parentId=function(n){return arguments.length?(e=Vo(n),t):e},t},t.tree=function(){function t(t){var f=function(t){for(var n,e,r,i,o,a=new ca(t,0),u=[a];n=u.pop();)if(r=n._.children)for(n.children=new Array(o=r.length),i=o-1;i>=0;--i)u.push(e=n.children[i]=new ca(r[i],i)),e.parent=n;return(a.parent=new ca(null,0)).children=[a],a}(t);if(f.eachAfter(n),f.parent.m=-f.z,f.eachBefore(e),u)t.eachBefore(r);else{var c=t,s=t,l=t;t.eachBefore(function(t){t.xs.x&&(s=t),t.depth>l.depth&&(l=t)});var h=c===s?1:i(c,s)/2,d=h-c.x,p=o/(s.x+h+d),v=a/(l.depth||1);t.eachBefore(function(t){t.x=(t.x+d)*p,t.y=t.depth*v})}return t}function n(t){var n=t.children,e=t.parent.children,r=t.i?e[t.i-1]:null;if(n){(function(t){for(var n,e=0,r=0,i=t.children,o=i.length;--o>=0;)(n=i[o]).z+=e,n.m+=e,e+=n.s+(r+=n.c)})(t);var o=(n[0].z+n[n.length-1].z)/2;r?(t.z=r.z+i(t._,r._),t.m=t.z-o):t.z=o}else r&&(t.z=r.z+i(t._,r._));t.parent.A=function(t,n,e){if(n){for(var r,o=t,a=t,u=n,f=o.parent.children[0],c=o.m,s=a.m,l=u.m,h=f.m;u=aa(u),o=oa(o),u&&o;)f=oa(f),(a=aa(a)).a=t,(r=u.z+l-o.z-c+i(u._,o._))>0&&(ua(fa(u,t,e),t,r),c+=r,s+=r),l+=u.m,c+=o.m,h+=f.m,s+=a.m;u&&!aa(a)&&(a.t=u,a.m+=l-s),o&&!oa(f)&&(f.t=o,f.m+=c-h,e=t)}return e}(t,r,t.parent.A||e[0])}function e(t){t._.x=t.z+t.parent.m,t.m+=t.parent.m}function r(t){t.x*=o,t.y=t.depth*a}var i=ia,o=1,a=1,u=null;return t.separation=function(n){return arguments.length?(i=n,t):i},t.size=function(n){return arguments.length?(u=!1,o=+n[0],a=+n[1],t):u?null:[o,a]},t.nodeSize=function(n){return arguments.length?(u=!0,o=+n[0],a=+n[1],t):u?[o,a]:null},t},t.treemap=function(){function t(t){return t.x0=t.y0=0,t.x1=i,t.y1=o,t.eachBefore(n),a=[0],r&&t.eachBefore(ta),t}function n(t){var n=a[t.depth],r=t.x0+n,i=t.y0+n,o=t.x1-n,h=t.y1-n;o=n-1){var c=f[t];return c.x0=r,c.y0=i,c.x1=a,void(c.y1=u)}for(var l=s[t],h=e/2+l,d=t+1,p=n-1;d>>1;s[v]u-i){var _=(r*y+a*g)/e;o(t,d,g,r,i,_,u),o(d,n,y,_,i,a,u)}else{var 
b=(i*y+u*g)/e;o(t,d,g,r,i,a,b),o(d,n,y,r,b,a,u)}}var a,u,f=t.children,c=f.length,s=new Array(c+1);for(s[0]=u=a=0;a=0;--n)c.push(t[r[o[n]][2]]);for(n=+u;nu!=c>u&&a<(f-e)*(u-r)/(c-r)+e&&(s=!s),f=e,c=r;return s},t.polygonLength=function(t){for(var n,e,r=-1,i=t.length,o=t[i-1],a=o[0],u=o[1],f=0;++r1)&&(t-=Math.floor(t));var n=Math.abs(t-.5);return u_.h=360*t-100,u_.s=1.5-1.5*n,u_.l=.8-.9*n,u_+""},t.interpolateWarm=o_,t.interpolateCool=a_,t.interpolateSinebow=function(t){var n;return t=(.5-t)*Math.PI,f_.r=255*(n=Math.sin(t))*n,f_.g=255*(n=Math.sin(t+c_))*n,f_.b=255*(n=Math.sin(t+s_))*n,f_+""},t.interpolateViridis=l_,t.interpolateMagma=h_,t.interpolateInferno=d_,t.interpolatePlasma=p_,t.create=function(t){return ct(C(t).call(document.documentElement))},t.creator=C,t.local=st,t.matcher=_s,t.mouse=pt,t.namespace=k,t.namespaces=ps,t.clientPoint=dt,t.select=ct,t.selectAll=function(t){return"string"==typeof t?new ut([document.querySelectorAll(t)],[document.documentElement]):new ut([null==t?[]:t],xs)},t.selection=ft,t.selector=z,t.selectorAll=L,t.style=F,t.touch=vt,t.touches=function(t,n){null==n&&(n=ht().touches);for(var e=0,r=n?n.length:0,i=new Array(r);eh;if(f||(f=t=oe()),lw_)if(p>T_-w_)f.moveTo(l*y_(h),l*m_(h)),f.arc(0,0,l,h,d,!v),s>w_&&(f.moveTo(s*y_(d),s*m_(d)),f.arc(0,0,s,d,h,v));else{var g,y,_=h,b=d,m=h,x=d,w=p,M=p,A=u.apply(this,arguments)/2,T=A>w_&&(i?+i.apply(this,arguments):x_(s*s+l*l)),N=b_(v_(l-s)/2,+r.apply(this,arguments)),S=N,E=N;if(T>w_){var k=lf(T/s*m_(A)),C=lf(T/l*m_(A));(w-=2*k)>w_?(k*=v?1:-1,m+=k,x-=k):(w=0,m=x=(h+d)/2),(M-=2*C)>w_?(C*=v?1:-1,_+=C,b-=C):(M=0,_=b=(h+d)/2)}var P=l*y_(_),z=l*m_(_),R=s*y_(x),L=s*m_(x);if(N>w_){var D=l*y_(b),U=l*m_(b),q=s*y_(m),O=s*m_(m);if(pw_?function(t,n,e,r,i,o,a,u){var f=e-t,c=r-n,s=a-i,l=u-o,h=(s*(n-o)-l*(t-i))/(l*f-s*c);return[t+h*f,n+h*c]}(P,z,q,O,D,U,R,L):[R,L],B=P-Y[0],F=z-Y[1],I=D-Y[0],j=U-Y[1],H=1/m_(function(t){return 
t>1?0:t<-1?M_:Math.acos(t)}((B*I+F*j)/(x_(B*B+F*F)*x_(I*I+j*j)))/2),X=x_(Y[0]*Y[0]+Y[1]*Y[1]);S=b_(N,(s-X)/(H-1)),E=b_(N,(l-X)/(H+1))}}M>w_?E>w_?(g=yf(q,O,P,z,l,E,v),y=yf(D,U,R,L,l,E,v),f.moveTo(g.cx+g.x01,g.cy+g.y01),Ew_&&w>w_?S>w_?(g=yf(R,L,D,U,s,-S,v),y=yf(P,z,q,O,s,-S,v),f.lineTo(g.cx+g.x01,g.cy+g.y01),S0&&(d+=l);for(null!=e?p.sort(function(t,n){return e(v[t],v[n])}):null!=r&&p.sort(function(n,e){return r(t[n],t[e])}),u=0,c=d?(y-h*b)/d:0;u0?l*c:0)+b,v[f]={data:t[f],index:u,value:l,startAngle:g,endAngle:s,padAngle:_};return v}var n=Tf,e=Af,r=null,i=sf(0),o=sf(T_),a=sf(0);return t.value=function(e){return arguments.length?(n="function"==typeof e?e:sf(+e),t):n},t.sortValues=function(n){return arguments.length?(e=n,r=null,t):e},t.sort=function(n){return arguments.length?(r=n,e=null,t):r},t.startAngle=function(n){return arguments.length?(i="function"==typeof n?n:sf(+n),t):i},t.endAngle=function(n){return arguments.length?(o="function"==typeof n?n:sf(+n),t):o},t.padAngle=function(n){return arguments.length?(a="function"==typeof n?n:sf(+n),t):a},t},t.areaRadial=Cf,t.radialArea=Cf,t.lineRadial=kf,t.radialLine=kf,t.pointRadial=Pf,t.linkHorizontal=function(){return Lf(Df)},t.linkVertical=function(){return Lf(Uf)},t.linkRadial=function(){var t=Lf(qf);return t.angle=t.x,delete t.x,t.radius=t.y,delete t.y,t},t.symbol=function(){function t(){var t;if(r||(r=t=oe()),n.apply(this,arguments).draw(r,+e.apply(this,arguments)),t)return r=null,t+""||null}var n=sf(E_),e=sf(64),r=null;return t.type=function(e){return arguments.length?(n="function"==typeof e?e:sf(e),t):n},t.size=function(n){return arguments.length?(e="function"==typeof n?n:sf(+n),t):e},t.context=function(n){return arguments.length?(r=null==n?null:n,t):r},t},t.symbols=H_,t.symbolCircle=E_,t.symbolCross=k_,t.symbolDiamond=z_,t.symbolSquare=q_,t.symbolStar=U_,t.symbolTriangle=Y_,t.symbolWye=j_,t.curveBasisClosed=function(t){return new Ff(t)},t.curveBasisOpen=function(t){return new If(t)},t.curveBasis=function(t){return 
new Bf(t)},t.curveBundle=X_,t.curveCardinalClosed=V_,t.curveCardinalOpen=$_,t.curveCardinal=G_,t.curveCatmullRomClosed=Z_,t.curveCatmullRomOpen=Q_,t.curveCatmullRom=W_,t.curveLinearClosed=function(t){return new Jf(t)},t.curveLinear=bf,t.curveMonotoneX=function(t){return new rc(t)},t.curveMonotoneY=function(t){return new ic(t)},t.curveNatural=function(t){return new ac(t)},t.curveStep=function(t){return new fc(t,.5)},t.curveStepAfter=function(t){return new fc(t,1)},t.curveStepBefore=function(t){return new fc(t,0)},t.stack=function(){function t(t){var o,a,u=n.apply(this,arguments),f=t.length,c=u.length,s=new Array(c);for(o=0;o0){for(var e,r,i,o=0,a=t[0].length;o1)for(var e,r,i,o,a,u,f=0,c=t[n[0]].length;f=0?(r[0]=o,r[1]=o+=i):i<0?(r[1]=a,r[0]=a+=i):r[0]=o},t.stackOffsetNone=cc,t.stackOffsetSilhouette=function(t,n){if((e=t.length)>0){for(var e,r=0,i=t[n[0]],o=i.length;r0&&(r=(e=t[n[0]]).length)>0){for(var e,r,i,o=0,a=1;aRl&&e.name===n)return new On([[t]],lh,n,+r)}return null},t.interrupt=Dn,t.voronoi=function(){function t(t){return new Yc(t.map(function(r,i){var o=[Math.round(n(r,i,t)/ob)*ob,Math.round(e(r,i,t)/ob)*ob];return o.index=i,o.data=r,o}),r)}var n=vc,e=gc,r=null;return t.polygons=function(n){return t(n).polygons()},t.links=function(n){return t(n).links()},t.triangles=function(n){return t(n).triangles()},t.x=function(e){return arguments.length?(n="function"==typeof e?e:pc(+e),t):n},t.y=function(n){return arguments.length?(e="function"==typeof n?n:pc(+n),t):e},t.extent=function(n){return arguments.length?(r=null==n?null:[[+n[0][0],+n[0][1]],[+n[1][0],+n[1][1]]],t):r&&[[r[0][0],r[0][1]],[r[1][0],r[1][1]]]},t.size=function(n){return arguments.length?(r=null==n?null:[[0,0],[+n[0],+n[1]]],t):r&&[r[1][0]-r[0][0],r[1][1]-r[0][1]]},t},t.zoom=function(){function n(t){t.property("__zoom",Vc).on("wheel.zoom",f).on("mousedown.zoom",c).on("dblclick.zoom",s).filter(m).on("touchstart.zoom",l).on("touchmove.zoom",h).on("touchend.zoom 
touchcancel.zoom",d).style("touch-action","none").style("-webkit-tap-highlight-color","rgba(0,0,0,0)")}function e(t,n){return(n=Math.max(x[0],Math.min(x[1],n)))===t.k?t:new Fc(n,t.x,t.y)}function r(t,n,e){var r=n[0]-e[0]*t.k,i=n[1]-e[1]*t.k;return r===t.x&&i===t.y?t:new Fc(t.k,r,i)}function i(t){return[(+t[0][0]+ +t[1][0])/2,(+t[0][1]+ +t[1][1])/2]}function o(t,n,e){t.on("start.zoom",function(){a(this,arguments).start()}).on("interrupt.zoom end.zoom",function(){a(this,arguments).end()}).tween("zoom",function(){var t=arguments,r=a(this,t),o=y.apply(this,t),u=e||i(o),f=Math.max(o[1][0]-o[0][0],o[1][1]-o[0][1]),c=this.__zoom,s="function"==typeof n?n.apply(this,t):n,l=A(c.invert(u).concat(f/c.k),s.invert(u).concat(f/s.k));return function(t){if(1===t)t=s;else{var n=l(t),e=f/n[2];t=new Fc(e,u[0]-n[0]*e,u[1]-n[1]*e)}r.zoom(null,t)}})}function a(t,n){for(var e,r=0,i=T.length;rC}n.zoom("mouse",_(r(n.that.__zoom,n.mouse[0]=pt(n.that),n.mouse[1]),n.extent,w))},!0).on("mouseup.zoom",function(){e.on("mousemove.zoom mouseup.zoom",null),bt(t.event.view,n.moved),Hc(),n.end()},!0),i=pt(this),o=t.event.clientX,u=t.event.clientY;_t(t.event.view),jc(),n.mouse=[i,this.__zoom.invert(i)],Dn(this),n.start()}}function s(){if(g.apply(this,arguments)){var i=this.__zoom,a=pt(this),u=i.invert(a),f=i.k*(t.event.shiftKey?.5:2),c=_(r(e(i,f),a,u),y.apply(this,arguments),w);Hc(),M>0?ct(this).transition().duration(M).call(o,c,a):ct(this).call(n.transform,c)}}function l(){if(g.apply(this,arguments)){var n,e,r,i,o=a(this,arguments),u=t.event.changedTouches,f=u.length;for(jc(),e=0;e - + @@ -65,7 +65,6 @@
- From e0e9c1777d396421f5a98f5dc55d30f5e01d3322 Mon Sep 17 00:00:00 2001 From: kimonoki Date: Wed, 13 Jun 2018 20:07:44 +1000 Subject: [PATCH 17/53] TVB-2368 Use local file for d3v5.js --- .../genshi/visualizers/new_dual_brain/dual_brain_2d_view.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html index b3a552e1a..9e06deda2 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html @@ -1,6 +1,6 @@
- + From 3cd69894736216f4b81a5422ae4581fdc7ae29dc Mon Sep 17 00:00:00 2001 From: kimonoki Date: Thu, 21 Jun 2018 15:20:08 +1000 Subject: [PATCH 18/53] TVB-2372 Move the time selection window along with the 3d slider --- .../commons/activity_toolbar_paused.html | 12 +++ .../dual_brain_3d_internal_view.html | 2 +- .../new_dual_brain/dual_brain_3d_view.html | 2 +- .../new_dual_brain/scripts/dualBrainViewer.js | 16 ++-- .../scripts/timeseries3DScript.js | 12 ++- .../new_dual_brain/scripts/timeseriesD3.js | 78 ++++++++++++++----- 6 files changed, 91 insertions(+), 31 deletions(-) create mode 100644 tvb/interfaces/web/templates/genshi/visualizers/commons/activity_toolbar_paused.html diff --git a/tvb/interfaces/web/templates/genshi/visualizers/commons/activity_toolbar_paused.html b/tvb/interfaces/web/templates/genshi/visualizers/commons/activity_toolbar_paused.html new file mode 100644 index 000000000..b79e28132 --- /dev/null +++ b/tvb/interfaces/web/templates/genshi/visualizers/commons/activity_toolbar_paused.html @@ -0,0 +1,12 @@ +
    +
    + +
    +
    ms
    +
    +
  • + + ${drawTimeseriesSelectorButton(measurePointsTitle,labelsStateVar, labelsModes, groupedLabels, initialSelection)} +
  • +
diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_internal_view.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_internal_view.html index 6d09e2c61..b0b458b19 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_internal_view.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_internal_view.html @@ -19,6 +19,6 @@ }); - +
\ No newline at end of file diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_view.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_view.html index b7915559e..395b2ee4f 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_view.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_view.html @@ -16,6 +16,6 @@ '${urlRegionBoundaries}', '${measurePointsSelectionGID}',${'true' if withTransparency else 'false'}); }); - +
\ No newline at end of file diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js index 29bc8c17f..672330a97 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js @@ -438,7 +438,7 @@ function submitSelectedChannels(isEndOfData) { } - //TODO find why it's 1 and don't use hardcoded numbers here + //TODO find why it's 1 and don't use hardcoded numbers here, or use timeseries method var dataShape = [AG_time.length, 1, AG_submitableSelectedChannels.length, 1]; var selectedLabels = [] for (let i = 0; i < AG_submitableSelectedChannels.length; i++) { @@ -475,18 +475,18 @@ var ts = null; //time selection functions function intervalIncrease() { - console.log(timeselection_interval); - timeselection_interval+=1; - $("#time-selection-interval").html(timeselection_interval) + timeselection_interval += 1; + $("#time-selection-interval").html(timeselection_interval); + tsView.timeselection_interval_increase(); } -function intervalDecrease(){ - timeselection_interval-=1; - $("#time-selection-interval").html(timeselection_interval) +function intervalDecrease() { + timeselection_interval -= 1; + $("#time-selection-interval").html(timeselection_interval); + tsView.timeselection_interval_decrease() } - function resizeToFillParent(ts) { var container, width, height; diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseries3DScript.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseries3DScript.js index 0207676fb..3054ad536 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseries3DScript.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseries3DScript.js @@ -154,6 
+154,7 @@ var activityMin = 0, activityMax = 0; var isOneToOneMapping = false; var isDoubleView = false; var isEEGView = false; +//apply transparency on the shell surface var withTransparency = false; var drawingMode; var VS_showLegend = true; @@ -424,6 +425,9 @@ function VS_StartBrainActivityViewer(baseDatatypeURL, onePageSize, urlTimeList, // For the double view the selection is the responsibility of the extended view functions } withTransparency = transparencyStatus; + //pause by default + AG_isStopped = true; + } function _isValidActivityData() { @@ -579,6 +583,11 @@ function _initSliders() { currentTimeValue = target.value; $('#TimeNow').val(currentTimeValue); }, + change: function (event, ui) { + triggered_by_timeselection = false; + tsView.timeselection_move_fn(); + triggered_by_timeselection = true; + }, stop: function (event, target) { sliderSel = false; loadFromTimeStep(target.value); @@ -1282,7 +1291,8 @@ function initActivityData() { * Load the brainviewer from this given time step. */ function loadFromTimeStep(step) { - showBlockerOverlay(50000); + // TODO doesn't work in firefox. 
not showing the loading process in other browsers + // showBlockerOverlay(50000); if (step % TIME_STEP !== 0) { step = step - step % TIME_STEP + TIME_STEP; // Set time to be multiple of step } diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js index 3fb2a8f45..876475a16 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js @@ -33,9 +33,14 @@ /* global tv, d3 */ -//will store the interval and time selection range -var timeselection_interval=0; -var timeselection=[]; +//added globals for time selection +var timeselection_interval = 0; +var timeselection = []; + +// identify the initiator of the change of the time selection: brushing or movie timeline +var triggered_by_timeselection = true; +//store the unmapped selection value used to animate the time selection window +var selection_x = []; tv = {}; @@ -394,7 +399,7 @@ tv.plot = { f.ul_ctx_y = {x: f.pad.x, y: f.pad.y}; f.sz_ctx_y = {x: f.pad.x * 0.8, y: f.h() - 3 * f.pad.y - f.pad.y}; f.ul_ctx_x = {x: f.pad.x, y: 2 * f.pad.y + f.sz_ctx_y.y}; - f.sz_ctx_x = {x: f.w() - 2 * f.pad.x, y: f.pad.y}; + f.sz_ctx_x = {x: f.w() - 2 * f.pad.x, y: f.pad.y / 2}; f.ul_fcs = {x: f.ul_ctx_x.x, y: f.ul_ctx_y.y}; f.sz_fcs = {x: f.sz_ctx_x.x, y: f.sz_ctx_y.y}; @@ -776,18 +781,19 @@ tv.plot = { if (d3.event.selection != null) { event_selection_x[0] = d3.event.selection[0]; event_selection_x[1] = d3.event.selection[1]; + selection_x = event_selection_x; } var scale_brushed = d3.scaleLinear().domain(f.dom_x).range(f.sc_ctx_x.range()); event_selection_x = event_selection_x.map(scale_brushed.invert, scale_brushed); dom = f.br_ctx_x === null ? 
f.sc_ctx_x.domain() : event_selection_x; timeselection = event_selection_x; + // remove the last time's selection f.gp_ctx_x.selectAll(".selected-time").remove(); - //change the actual time point in the slider if (d3.event.selection != null) { - f.timeselection_update_fn() + f.timeselection_update_fn(triggered_by_timeselection) } @@ -857,8 +863,8 @@ tv.plot = { // add time selection brush group f.gp_br_ctx_x = f.gp_ctx_x.append("g"); //add title for the time selection area - f.timeselection_title = f.gp_br_ctx_x.append("text").text("Time Selection").attr("y", -10) - f.gp_br_ctx_x.append("g").classed("brush", true).call(f.br_ctx_x).selectAll("rect").attr("height", f.sz_ctx_x.y); + f.timeselection_title = f.gp_br_ctx_x.append("text").text("Time Selection").attr("y", -10); + f.gp_br_ctx_x.classed("brush", true).attr("class", "time-selection-brush").call(f.br_ctx_x).selectAll("rect").attr("height", f.sz_ctx_x.y); //add main focus brush group @@ -869,28 +875,60 @@ tv.plot = { //functions for the time selection window - f.timeselection_update_fn = function () { + f.timeselection_update_fn = function (triggered) { + //display the selected time range - f.text = f.gp_ctx_x.append("text").attr("class","selected-time").attr("id","time-selection") + f.text = f.gp_ctx_x.append("text").attr("class", "selected-time").attr("id", "time-selection") .text("Selected Time Range: " + timeselection[0].toFixed(2) + "ms" + " to " + timeselection[1].toFixed(2) + "ms"); + f.text_interval = f.gp_ctx_x.append("text").attr("class", "selected-time").attr("id", "time-selection-interval").text(" Interval:" + (timeselection[1] - timeselection[0]).toFixed(2)).attr("x", 100).attr("y", -10); + + if (triggered) { + timeselection_interval = timeselection[1] - timeselection[0]; - f.text_interval=f.gp_ctx_x.append("text").attr("class","selected-time").attr("id","time-selection-interval").text(" Interval:" + (timeselection[1] - timeselection[0]).toFixed(2)).attr("x", 100).attr("y", -10); + //update the 
time in the input tag + d3.select("#TimeNow").property('value', timeselection[0].toFixed(2)); - timeselection_interval=timeselection[1] - timeselection[0]; + //update the time in the 3d viewer's time + $('#slider').slider('value', timeselection[0].toFixed(2)); + loadFromTimeStep(parseInt(timeselection[0])); - //update the time in the input tag - d3.select("#TimeNow").property('value', timeselection[0].toFixed(2)); + } - //update the time in the 3d viewer's time - $('#slider').slider('value', timeselection[0].toFixed(2)); - loadFromTimeStep(parseInt(timeselection[0])); }; - f.interval_increase=function(){ - timeselection[1]=timeselection[1]+1; - f.timeselection_update_fn() + //move the time selection window with the slider + f.timeselection_move_fn = function () { + redrawSelection() }; + + //increase and decease the interval by dt, need minus dt brought by the triggered change + f.timeselection_interval_increase = function () { + d3.select(f.gp_br_ctx_x.node()).call(f.br_ctx_x.move, [timeselection[0] - f.dt(), timeselection[1]].map(f.sc_ctx_x)); + } + + f.timeselection_interval_decrease = function () { + d3.select(f.gp_br_ctx_x.node()).call(f.br_ctx_x.move, [timeselection[0]- f.dt(), timeselection[1] - 2*f.dt()].map(f.sc_ctx_x)); + + } + + + //need fix one additional step by any change + function redrawSelection() { + //>1 *timeStepsPerTick + if (timeStepsPerTick > 1) { + d3.select(f.gp_br_ctx_x.node()).call(f.br_ctx_x.move, [timeselection[0] + f.dt() * timeStepsPerTick, timeselection[1] + f.dt() * timeStepsPerTick].map(f.sc_ctx_x)); + } + //<1 1/2 *0.33 + else if (timeStepsPerTick < 1) { + d3.select(f.gp_br_ctx_x.node()).call(f.br_ctx_x.move, [timeselection[0] + f.dt() * 1 / (1 / timeStepsPerTick + 1), timeselection[1] + f.dt() * 1 / (1 / timeStepsPerTick + 1)].map(f.sc_ctx_x)); + } + else if (timeStepsPerTick === 1) { + d3.select(f.gp_br_ctx_x.node()).call(f.br_ctx_x.move, [timeselection[0] + f.dt(), timeselection[1] + f.dt()].map(f.sc_ctx_x)); + } + } + + 
f.parameters = ["w", "h", "p", "baseURL", "preview", "labels", "shape", "t0", "dt", "ts", "ys", "point_limit", "channels", "mode", "state_var"]; f.parameters.map(function (name) { From 621e1390826fc178c7b76e1638ec5df615e4f38d Mon Sep 17 00:00:00 2001 From: kimonoki Date: Mon, 25 Jun 2018 10:57:17 +1000 Subject: [PATCH 19/53] TVB-2372 Fix showBlockerOverlay not working in Firefox Chrome still doesn't display an overlay (in both the old and new dual viewer) --- tvb/interfaces/web/static/style/subsection_timeseries.css | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tvb/interfaces/web/static/style/subsection_timeseries.css b/tvb/interfaces/web/static/style/subsection_timeseries.css index 7b0131caa..ee815cdb4 100644 --- a/tvb/interfaces/web/static/style/subsection_timeseries.css +++ b/tvb/interfaces/web/static/style/subsection_timeseries.css @@ -30,7 +30,7 @@ /* !------------------------------------------------------- */ /*overlay changed in d3v4 for new dual brain viewer*/ -.overlay{ +rect.overlay{ visibility: hidden; } From 8b8727cc6ff903d4ea7652f69d51bfe2fa3f750d Mon Sep 17 00:00:00 2001 From: kimonoki Date: Mon, 9 Jul 2018 15:14:24 +1000 Subject: [PATCH 20/53] TVB-2372 Display Dynamic Spheres from the energy computed --- .../commons/scripts/internalBrain.js | 5 ++- .../commons/scripts/visualizers_commons.js | 6 +++ .../new_dual_brain/dual_brain_2d_view.html | 14 ++++--- .../new_dual_brain/scripts/dualBrainViewer.js | 11 ++---- .../scripts/timeseries3DScript.js | 33 ++++++++++++++-- .../new_dual_brain/scripts/timeseriesD3.js | 39 +++++++++++++------ .../visualizers/new_dual_brain/view.html | 2 +- 7 files changed, 81 insertions(+), 29 deletions(-) diff --git a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/internalBrain.js b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/internalBrain.js index 3f88321ec..868179aa0 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/internalBrain.js +++ 
b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/internalBrain.js @@ -1,5 +1,5 @@ /** - * TheVirtualBrain-Framework Package. This package holds all Data Management, and + * TheVirtualBrain-Framework Package. This package holds all Data Management, and * Web-UI helpful to run brain-simulations. To use it, you also need do download * TheVirtualBrain-Scientific Package (for simulators). See content of the * documentation-folder for more details. See also http://www.thevirtualbrain.org @@ -73,4 +73,7 @@ function VSI_StartInternalActivityViewer(baseDatatypeURL, onePageSize, urlTimeLi isFaceToDisplay = true; _VSI_init_sphericalMeasurePoints(); + + //pause by default + AG_isStopped = true;; } diff --git a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/visualizers_commons.js b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/visualizers_commons.js index 0e49b1149..679ab5843 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/visualizers_commons.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/visualizers_commons.js @@ -42,6 +42,12 @@ function readDataChannelURL(baseDatatypeMethodURL, fromIdx, toIdx, stateVariable return baseURL.replace('read_data_page', 'read_channels_page') + ';channels_list=' + channels; } +function readDataEnergyURL(baseDatatypeMethodURL, fromIdx, toIdx, stateVariable, mode, step, channels,timeselectionlLength) { + const baseURL = readDataPageURL(baseDatatypeMethodURL, fromIdx, toIdx, stateVariable, mode, step); + return baseURL.replace('read_data_page', 'read_time_selection_energy') + ';channels_list=' + channels+ + ';interval_length=' + timeselectionlLength ; +} + // ------ Datatype methods mappings end here diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html index 9e06deda2..a088f98d6 100644 --- 
a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html @@ -54,13 +54,17 @@ buttonTitle="Select signals from Input %d" % (idx+1))} - -
-
-
+
    + +
    + Increase Interval +
    +
    + Decrease Interval +
    +
-
diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js index 672330a97..3ca867cf4 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js @@ -163,6 +163,8 @@ var AG_modeSelector = null; // GID for the D3 viewer var filterGid = null; +//timeseries viewer +var ts = null; window.onresize = function () { resizeToFillParent(); @@ -388,7 +390,6 @@ function _AG_getSelectedDataAndLongestChannelIndex(data) { * exist then just use the previous 'displayedChannels' (or default in case of first run). */ function submitSelectedChannels(isEndOfData) { - AG_currentIndex = AG_numberOfVisiblePoints; if (AG_submitableSelectedChannels.length === 0) { AG_submitableSelectedChannels = displayedChannels.slice(); @@ -438,14 +439,14 @@ function submitSelectedChannels(isEndOfData) { } - //TODO find why it's 1 and don't use hardcoded numbers here, or use timeseries method + //The shape we use for time series now only uses 1D var dataShape = [AG_time.length, 1, AG_submitableSelectedChannels.length, 1]; var selectedLabels = [] for (let i = 0; i < AG_submitableSelectedChannels.length; i++) { selectedLabels.push([chanDisplayLabels[displayedChannels[i]]]); } - + //use d3 to create 2D plot ts = tv.plot.time_series(); ts.baseURL(baseDataURLS[0]).preview(false).mode(0).state_var(0); ts.shape(dataShape).t0(AG_time[1] / 2).dt(AG_time[1]); @@ -470,9 +471,6 @@ function submitSelectedChannels(isEndOfData) { } -//timeseries viewer -var ts = null; - //time selection functions function intervalIncrease() { timeselection_interval += 1; @@ -603,7 +601,6 @@ function loadEEGChartFromTimeStep(step) { const dataPage = [parseData(HLPR_readJSONfromFile(dataUrl), 0)]; AG_allPoints = getDisplayedChannels(dataPage[0], 0).slice(0); AG_time = 
HLPR_readJSONfromFile(timeSetUrls[0][chunkForStep]).slice(0); - totalPassedData = chunkForStep * dataPageSize; // New passed data will be all data until the start of this page currentDataFileIndex = chunkForStep; AG_displayedPoints = []; diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseries3DScript.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseries3DScript.js index 3054ad536..94ee06397 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseries3DScript.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseries3DScript.js @@ -387,6 +387,10 @@ function _VS_init_cubicalMeasurePoints() { } + + + + function VS_StartSurfaceViewer(urlVerticesList, urlLinesList, urlTrianglesList, urlNormalsList, urlMeasurePoints, noOfMeasurePoints, urlRegionMapList, urlMeasurePointsLabels, boundaryURL, shelveObject, minMeasure, maxMeasure, urlMeasure, hemisphereChunkMask) { @@ -562,7 +566,7 @@ function _initSliders() { if (timeData.length > 0) { $("#sliderStep").slider({ - min: 0, max: maxSpeedSlider, step: 1, value: 5, + min: 0.49, max: maxSpeedSlider, step: 1, value: 5, stop: function () { refreshCurrentDataSlice(); sliderSel = false; @@ -1116,6 +1120,10 @@ function tick() { const currentTimeInFrame = Math.floor((currentTimeValue - totalPassedActivitiesData) / TIME_STEP); updateColors(currentTimeInFrame); + //update energy + if(timeselection_interval!=0){ + init_cubicalMeasurePoints_energy(); + } drawScene(); /// Update FPS and Movie timeline @@ -1130,7 +1138,8 @@ function tick() { lastTime = timeNow; if (timeData.length > 0 && !AG_isStopped) { - document.getElementById("TimeNow").value = toSignificantDigits(timeData[currentTimeValue], 2); + //TODO workaround for incorrect time values + document.getElementById("TimeNow").value = toSignificantDigits(timeData[currentTimeValue]+0.49, 2); } let meanFrameTime = 0; for (let i = 0; i < 
framestime.length; i++) { @@ -1291,8 +1300,7 @@ function initActivityData() { * Load the brainviewer from this given time step. */ function loadFromTimeStep(step) { - // TODO doesn't work in firefox. not showing the loading process in other browsers - // showBlockerOverlay(50000); + showBlockerOverlay(50000); if (step % TIME_STEP !== 0) { step = step - step % TIME_STEP + TIME_STEP; // Set time to be multiple of step } @@ -1411,3 +1419,20 @@ function readFileData(fileUrl, async, callIdentifier) { /////////////////////////////////////// ~~~~~~~~~~ END DATA RELATED METHOD ~~~~~~~~~~~~~ ////////////////////////////////// + +/////////////////////////////////////// ~~~~~~~~~~ START ENERGY RELATED METHOD ~~~~~~~~~~~~~ ////////////////////////////////// + +//init spheres with energy controlling the radius +function init_cubicalMeasurePoints_energy() { + for (let i = 0; i < NO_OF_MEASURE_POINTS; i++) { + // generate spheres + const result = HLPR_sphereBufferAtPoint(gl, measurePoints[i],timeselection_energy[i][currentTimeValue]); + const bufferVertices = result[0]; + const bufferNormals = result[1]; + const bufferTriangles = result[2]; + const bufferColor = createColorBufferForCube(false); + measurePointsBuffers[i] = [bufferVertices, bufferNormals, bufferTriangles, bufferColor]; + } +} + +/////////////////////////////////////// ~~~~~~~~~~ END ENERGY RELATED METHOD ~~~~~~~~~~~~~ ////////////////////////////////// diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js index 876475a16..2fd85d550 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js @@ -34,6 +34,7 @@ /* global tv, d3 */ //added globals for time selection +var timeselection_interval_length = 0;//integer var timeselection_interval = 0; var 
timeselection = []; @@ -42,6 +43,9 @@ var triggered_by_timeselection = true; //store the unmapped selection value used to animate the time selection window var selection_x = []; +//store the energy calculated from the time selection +var timeselection_energy =[]; + tv = {}; tv.util = { @@ -99,6 +103,14 @@ tv.util = { //NOTE: If we need to add slices for the other dimensions pass them as the 'specific_slices' parameter. // Method called is from time_series.py. $.getJSON(readDataURL, callback); + }, + + get_time_selection_energy: function (baseURL, slices, callback, channels, currentMode, currentStateVar, timeselectionlLength) { + var readDataURL = readDataEnergyURL(baseURL, slices[0].lo, slices[0].hi, + currentStateVar, currentMode, slices[0].di, JSON.stringify(channels), timeselectionlLength); + //NOTE: If we need to add slices for the other dimensions pass them as the 'specific_slices' parameter. + // Method called is from time_series.py. + $.getJSON(readDataURL, callback) } }; @@ -327,9 +339,13 @@ tv.plot = { f.render(); }; // end function f() + f.energy_callback = function (data) { + timeselection_energy=data; + init_cubicalMeasurePoints_energy(); + }; + f.render = function () { f.status_line.text("waiting for data from server..."); - //console.log(f.baseURL(), f.current_slice()) tv.util.get_array_slice(f.baseURL(), f.current_slice(), f.render_callback, f.channels(), f.mode(), f.state_var()); }; @@ -776,6 +792,7 @@ tv.plot = { br_ctx_end = function () { + //get the selected time range var event_selection_x = []; if (d3.event.selection != null) { @@ -783,9 +800,7 @@ tv.plot = { event_selection_x[1] = d3.event.selection[1]; selection_x = event_selection_x; } - var scale_brushed = d3.scaleLinear().domain(f.dom_x).range(f.sc_ctx_x.range()); - event_selection_x = event_selection_x.map(scale_brushed.invert, scale_brushed); - dom = f.br_ctx_x === null ? 
f.sc_ctx_x.domain() : event_selection_x; + event_selection_x = event_selection_x.map(f.sc_ctx_x.invert); timeselection = event_selection_x; // remove the last time's selection @@ -795,6 +810,7 @@ tv.plot = { if (d3.event.selection != null) { f.timeselection_update_fn(triggered_by_timeselection) } + timeselection_interval_length=parseInt(timeselection_interval/f.dt())-1; }; @@ -878,20 +894,23 @@ tv.plot = { f.timeselection_update_fn = function (triggered) { //display the selected time range - f.text = f.gp_ctx_x.append("text").attr("class", "selected-time").attr("id", "time-selection") + f.text_timeselection_range = f.gp_ctx_x.append("text").attr("class", "selected-time").attr("id", "time-selection") .text("Selected Time Range: " + timeselection[0].toFixed(2) + "ms" + " to " + timeselection[1].toFixed(2) + "ms"); - f.text_interval = f.gp_ctx_x.append("text").attr("class", "selected-time").attr("id", "time-selection-interval").text(" Interval:" + (timeselection[1] - timeselection[0]).toFixed(2)).attr("x", 100).attr("y", -10); + f.text_interval = f.gp_ctx_x.append("text").attr("class", "selected-time").attr("id", "time-selection-interval") + .text(" Interval:" + (timeselection[1] - timeselection[0]).toFixed(2) + " ms").attr("x", 100).attr("y", -10); if (triggered) { timeselection_interval = timeselection[1] - timeselection[0]; //update the time in the input tag d3.select("#TimeNow").property('value', timeselection[0].toFixed(2)); - //update the time in the 3d viewer's time $('#slider').slider('value', timeselection[0].toFixed(2)); loadFromTimeStep(parseInt(timeselection[0])); + //call the energy computation method + //TODO update channel info when changed + tv.util.get_time_selection_energy(f.baseURL(), f.current_slice(), f.energy_callback, f.channels(), f.mode(), f.state_var(), timeselection_interval_length); } }; @@ -908,18 +927,16 @@ tv.plot = { } f.timeselection_interval_decrease = function () { - d3.select(f.gp_br_ctx_x.node()).call(f.br_ctx_x.move, 
[timeselection[0]- f.dt(), timeselection[1] - 2*f.dt()].map(f.sc_ctx_x)); + d3.select(f.gp_br_ctx_x.node()).call(f.br_ctx_x.move, [timeselection[0] - f.dt(), timeselection[1] - 2 * f.dt()].map(f.sc_ctx_x)); } - //need fix one additional step by any change + //TODO need to fix one additional step brought by any change function redrawSelection() { - //>1 *timeStepsPerTick if (timeStepsPerTick > 1) { d3.select(f.gp_br_ctx_x.node()).call(f.br_ctx_x.move, [timeselection[0] + f.dt() * timeStepsPerTick, timeselection[1] + f.dt() * timeStepsPerTick].map(f.sc_ctx_x)); } - //<1 1/2 *0.33 else if (timeStepsPerTick < 1) { d3.select(f.gp_br_ctx_x.node()).call(f.br_ctx_x.move, [timeselection[0] + f.dt() * 1 / (1 / timeStepsPerTick + 1), timeselection[1] + f.dt() * 1 / (1 / timeStepsPerTick + 1)].map(f.sc_ctx_x)); } diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/view.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/view.html index 8aab2cc15..d960453b0 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/view.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/view.html @@ -15,7 +15,7 @@ From 02302756b386e7be23c5e0d28d14811cff303dd1 Mon Sep 17 00:00:00 2001 From: kimonoki Date: Fri, 13 Jul 2018 17:10:13 +1000 Subject: [PATCH 21/53] TVB-2372 Channel selector for the sphere rendering --- .../scripts/timeseries3DScript.js | 25 ++++++++----------- .../new_dual_brain/scripts/timeseriesD3.js | 17 ++++++------- 2 files changed, 17 insertions(+), 25 deletions(-) diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseries3DScript.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseries3DScript.js index 94ee06397..4b43b3022 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseries3DScript.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseries3DScript.js @@ -17,7 
+17,7 @@ * **/ -/* globals gl, GL_shaderProgram, SHADING_Context */ +/* globals gl, GL_shaderProgram, SHADING_Context tsView */ /** * WebGL methods "inheriting" from webGL_xx.js in static/js. @@ -377,7 +377,7 @@ function _VS_movie_entrypoint(baseDatatypeURL, onePageSize, urlTimeList, urlVert function _VS_init_cubicalMeasurePoints() { for (let i = 0; i < NO_OF_MEASURE_POINTS; i++) { - const result = HLPR_sphereBufferAtPoint(gl, measurePoints[i], 3);//3 for the default radius value now, we will modify it later + const result = HLPR_sphereBufferAtPoint(gl, measurePoints[i], 1);//3 for the default radius value now, we will modify it later const bufferVertices = result[0]; const bufferNormals = result[1]; const bufferTriangles = result[2]; @@ -387,10 +387,6 @@ function _VS_init_cubicalMeasurePoints() { } - - - - function VS_StartSurfaceViewer(urlVerticesList, urlLinesList, urlTrianglesList, urlNormalsList, urlMeasurePoints, noOfMeasurePoints, urlRegionMapList, urlMeasurePointsLabels, boundaryURL, shelveObject, minMeasure, maxMeasure, urlMeasure, hemisphereChunkMask) { @@ -1121,8 +1117,9 @@ function tick() { updateColors(currentTimeInFrame); //update energy - if(timeselection_interval!=0){ - init_cubicalMeasurePoints_energy(); + if(timeselection_interval!=0 && !AG_isStopped){ + + changeCubicalMeasurePoints_energy(); } drawScene(); @@ -1248,7 +1245,6 @@ function drawScene() { //display the channel name if (VS_pickedIndex != -1) { displayMessage("The highlighted node is " + measurePointsLabels[VS_pickedIndex], "infoMessage") - } doPick = false; gl.bindFramebuffer(gl.FRAMEBUFFER, null); @@ -1421,12 +1417,12 @@ function readFileData(fileUrl, async, callIdentifier) { /////////////////////////////////////// ~~~~~~~~~~ END DATA RELATED METHOD ~~~~~~~~~~~~~ ////////////////////////////////// /////////////////////////////////////// ~~~~~~~~~~ START ENERGY RELATED METHOD ~~~~~~~~~~~~~ ////////////////////////////////// - //init spheres with energy controlling the radius -function 
init_cubicalMeasurePoints_energy() { - for (let i = 0; i < NO_OF_MEASURE_POINTS; i++) { +function changeCubicalMeasurePoints_energy() { + selectedchannels=tsView.channels() + for (let i = 0; i < selectedchannels.length; i++) { // generate spheres - const result = HLPR_sphereBufferAtPoint(gl, measurePoints[i],timeselection_energy[i][currentTimeValue]); + const result = HLPR_sphereBufferAtPoint(gl, measurePoints[selectedchannels[i]],timeselection_energy[i][currentTimeValue]); const bufferVertices = result[0]; const bufferNormals = result[1]; const bufferTriangles = result[2]; @@ -1434,5 +1430,4 @@ function init_cubicalMeasurePoints_energy() { measurePointsBuffers[i] = [bufferVertices, bufferNormals, bufferTriangles, bufferColor]; } } - -/////////////////////////////////////// ~~~~~~~~~~ END ENERGY RELATED METHOD ~~~~~~~~~~~~~ ////////////////////////////////// +/////////////////////////////////////// ~~~~~~~~~~ END ENERGY RELATED METHOD ~~~~~~~~~~~~~ ////////////////////////////////// \ No newline at end of file diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js index 2fd85d550..6e2bbcf1c 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js @@ -44,7 +44,7 @@ var triggered_by_timeselection = true; var selection_x = []; //store the energy calculated from the time selection -var timeselection_energy =[]; +var timeselection_energy = []; tv = {}; @@ -340,8 +340,8 @@ tv.plot = { }; // end function f() f.energy_callback = function (data) { - timeselection_energy=data; - init_cubicalMeasurePoints_energy(); + timeselection_energy = data; + changeCubicalMeasurePoints_energy(); }; f.render = function () { @@ -810,7 +810,6 @@ tv.plot = { if (d3.event.selection != null) { 
f.timeselection_update_fn(triggered_by_timeselection) } - timeselection_interval_length=parseInt(timeselection_interval/f.dt())-1; }; @@ -901,6 +900,10 @@ tv.plot = { if (triggered) { timeselection_interval = timeselection[1] - timeselection[0]; + timeselection_interval_length = parseInt(timeselection_interval / f.dt()) - 1; + + //call the energy computation method + tv.util.get_time_selection_energy(f.baseURL(), f.current_slice(), f.energy_callback, f.channels(), f.mode(), f.state_var(), timeselection_interval_length); //update the time in the input tag d3.select("#TimeNow").property('value', timeselection[0].toFixed(2)); @@ -908,11 +911,7 @@ tv.plot = { $('#slider').slider('value', timeselection[0].toFixed(2)); loadFromTimeStep(parseInt(timeselection[0])); - //call the energy computation method - //TODO update channel info when changed - tv.util.get_time_selection_energy(f.baseURL(), f.current_slice(), f.energy_callback, f.channels(), f.mode(), f.state_var(), timeselection_interval_length); } - }; //move the time selection window with the slider @@ -928,10 +927,8 @@ tv.plot = { f.timeselection_interval_decrease = function () { d3.select(f.gp_br_ctx_x.node()).call(f.br_ctx_x.move, [timeselection[0] - f.dt(), timeselection[1] - 2 * f.dt()].map(f.sc_ctx_x)); - } - //TODO need to fix one additional step brought by any change function redrawSelection() { if (timeStepsPerTick > 1) { From dca028537e63943e245a38478c32d6a6b92fd075 Mon Sep 17 00:00:00 2001 From: kimonoki Date: Fri, 20 Jul 2018 18:57:45 +1000 Subject: [PATCH 22/53] TVB-2378 synchronize channels selection with spheres highlighting replace increase/decrease buttons with input fields add animation of the time selection when zoomed in send index number to the 3d slider --- .../new_dual_brain/dual_brain_2d_view.html | 15 ++++--- .../new_dual_brain/scripts/dualBrainViewer.js | 24 +++++------ .../scripts/timeseries3DScript.js | 8 ++-- .../new_dual_brain/scripts/timeseriesD3.js | 41 ++++++++++++------- 4 files 
changed, 52 insertions(+), 36 deletions(-) diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html index a088f98d6..5e2e249e6 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html @@ -56,13 +56,16 @@
    - -
    - Increase Interval -
    -
    - Decrease Interval + +
    + ms + ms +
    + Submit +
    + +
diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js index 3ca867cf4..775764102 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js @@ -459,6 +459,8 @@ function submitSelectedChannels(isEndOfData) { ts(d3.select("#time-series-viewer")); tsView = ts; + VS_selectedchannels=tsView.channels(); + // This is arbitrarily set to a value. To be consistent with tsview we rescale relative to this value _initial_magic_fcs_amp_scl = tsView.magic_fcs_amp_scl; @@ -471,19 +473,6 @@ function submitSelectedChannels(isEndOfData) { } -//time selection functions -function intervalIncrease() { - timeselection_interval += 1; - $("#time-selection-interval").html(timeselection_interval); - tsView.timeselection_interval_increase(); -} - -function intervalDecrease() { - timeselection_interval -= 1; - $("#time-selection-interval").html(timeselection_interval); - tsView.timeselection_interval_decrease() -} - function resizeToFillParent(ts) { var container, width, height; @@ -1025,3 +1014,12 @@ function updateSpeedFactor() { } //------------------------------------------------END SPEED RELATED CODE-------------------------------------------------------- +//------------------------------------------------START TIME SERIES TIME SELECTION RELATED CODE-------------------------------------------------------- + + function intervalSet(){ + var start=$('#SetIntervalStart').val(); + var end=$('#SetIntervalEnd').val(); + if(start 1) { d3.select(f.gp_br_ctx_x.node()).call(f.br_ctx_x.move, [timeselection[0] + f.dt() * timeStepsPerTick, timeselection[1] + f.dt() * timeStepsPerTick].map(f.sc_ctx_x)); } @@ -942,6 +934,27 @@ tv.plot = { } } + f.jump_to_next_time_range = function(){ + var time_data_length=f.shape()[0]; + var 
current_slice_length=f.current_slice()[0].hi-f.current_slice()[0].lo; + if(f.current_slice()[0].hi+current_slice_length Date: Tue, 24 Jul 2018 10:11:57 +1000 Subject: [PATCH 23/53] TVB-2379 Merge timeseriesD3 into tvbviz only the timeseries plot uses d3 version 5, others use version 3 --- tvb/interfaces/web/static/js/tvbviz.js | 434 +++++--- .../new_dual_brain/dual_brain_2d_view.html | 2 +- .../new_dual_brain/scripts/timeseriesD3.js | 969 ------------------ 3 files changed, 302 insertions(+), 1103 deletions(-) delete mode 100644 tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js diff --git a/tvb/interfaces/web/static/js/tvbviz.js b/tvb/interfaces/web/static/js/tvbviz.js index 10fb5fd66..fadcf3707 100644 --- a/tvb/interfaces/web/static/js/tvbviz.js +++ b/tvb/interfaces/web/static/js/tvbviz.js @@ -33,6 +33,19 @@ /* global tv, d3 */ +//added globals for time selection +var timeselection_interval_length = 0;//integer +var timeselection_interval = 0; +var timeselection = []; + +// identify the initiator of the change of the time selection: brushing or movie timeline +var triggered_by_timeselection = true; +//store the unmapped selection value used to animate the time selection window +var selection_x = []; + +//store the energy calculated from the time selection +var timeselection_energy = []; + tv = {}; tv.util = { @@ -90,7 +103,16 @@ tv.util = { //NOTE: If we need to add slices for the other dimensions pass them as the 'specific_slices' parameter. // Method called is from time_series.py. $.getJSON(readDataURL, callback); + }, + + get_time_selection_energy: function (baseURL, slices, callback, channels, currentMode, currentStateVar, timeselectionlLength) { + var readDataURL = readDataEnergyURL(baseURL, slices[0].lo, slices[0].hi, + currentStateVar, currentMode, slices[0].di, JSON.stringify(channels), timeselectionlLength); + //NOTE: If we need to add slices for the other dimensions pass them as the 'specific_slices' parameter. 
+ // Method called is from time_series.py. + $.getJSON(readDataURL, callback) } + }; tv.ndar = function (data) { @@ -529,8 +551,8 @@ tv.plot = { return f; }, + //time sereis uses d3v5 time_series: function () { - var f = function (root) { f.p(f.p() || 0.1); // pad @@ -562,9 +584,13 @@ tv.plot = { f.render(); }; // end function f() + f.energy_callback = function (data) { + timeselection_energy = data; + changeCubicalMeasurePoints_energy(); + }; + f.render = function () { f.status_line.text("waiting for data from server..."); - //console.log(f.baseURL(), f.current_slice()) tv.util.get_array_slice(f.baseURL(), f.current_slice(), f.render_callback, f.channels(), f.mode(), f.state_var()); }; @@ -633,8 +659,8 @@ tv.plot = { f.pad = {x: (0 ? f.w() : f.h()) * f.p(), y: f.h() * f.p()}; f.ul_ctx_y = {x: f.pad.x, y: f.pad.y}; f.sz_ctx_y = {x: f.pad.x * 0.8, y: f.h() - 3 * f.pad.y - f.pad.y}; - f.ul_ctx_x = {x: 2 * f.pad.x + f.sz_ctx_y.x, y: 2 * f.pad.y + f.sz_ctx_y.y}; - f.sz_ctx_x = {x: f.w() - 3 * f.pad.x - f.sz_ctx_y.x, y: f.pad.y}; + f.ul_ctx_x = {x: f.pad.x, y: 2 * f.pad.y + f.sz_ctx_y.y}; + f.sz_ctx_x = {x: f.w() - 2 * f.pad.x, y: f.pad.y / 2}; f.ul_fcs = {x: f.ul_ctx_x.x, y: f.ul_ctx_y.y}; f.sz_fcs = {x: f.sz_ctx_x.x, y: f.sz_ctx_y.y}; @@ -655,13 +681,13 @@ tv.plot = { }) .attr("x", f.w() - f.pad.x / 2).attr("y", f.h() - f.pad.y / 2) .attr("width", f.pad.x / 2).attr("height", f.pad.y / 2) - .call(d3.behavior.drag().on("drag", function () { + .call(d3.drag().on("drag", function () { var p1 = d3.mouse(svg.node()) , p2 = resize_start , scl = {x: p1[0] / p2[0], y: p1[1] / p2[1]}; rgp.attr("transform", "scale(" + scl.x + ", " + scl.y + ")"); svg.attr("width", scl.x * f.w()).attr("height", scl.y * f.h()); - }).on("dragstart", function () { + }).on("start", function () { resize_start = d3.mouse(rgp.node()); })); }; @@ -677,21 +703,49 @@ tv.plot = { , sh = ev.shiftKey , dr = !!(da > 0); - //console.log(ev) if (sh) { f.magic_fcs_amp_scl *= dr ? 
1.2 : 1 / 1.2; // TODO scale transform instead via direct access... f.prepare_data(); f.render_focus(); } else { - if (!(f.br_ctx_y.empty())) { - var ext = f.br_ctx_y.extent(); + if (!(f.gp_br_fcs.node().__brush === null)) { var dx = dr ? 1 : -1; - f.br_ctx_y.extent([ext[0] + dx, ext[1] + dx]); - f.br_ctx_y_fn(); + // stop scrolling if it is the end of the signals' list + if (f.dom_y[0] >= -1 && f.dom_y[1] <= f.channels().length) { + f.dom_y[0] += dx; + f.dom_y[1] += dx; + } + //lower bound + else if (f.dom_y[0] < -1) { + var delta = Math.abs(f.dom_y[0] - (-1)); + f.dom_y[0] += delta; + f.dom_y[1] += delta; + } + //upper bound + else if (f.dom_y[1] > f.channels().length) { + var delta = Math.abs(f.channels().length - f.dom_y[1]); + f.dom_y[0] -= delta; + f.dom_y[1] -= delta; + } + + //redraw the lines + var dom = f.dom_y; + var yscl = f.sz_fcs.y / (dom[1] - dom[0]) / 5; + f.sc_fcs_y.domain(dom).range([f.sz_ctx_y.y, 0]); + f.gp_ax_fcs_y.call(f.ax_fcs_y); + f.gp_lines.selectAll("g").attr("transform", function (d, i) { + return "translate(0, " + f.sc_fcs_y(i) + ")" + "scale (1, " + yscl + ")" + }).selectAll("path").attr("stroke-width", "" + (3 / yscl)); + f.scale_focus_stroke(); + + } + + } + }; f.signal_tick_labeler = function (tick_value) { @@ -702,39 +756,40 @@ tv.plot = { f.do_scaffolding = function (rgp) { // main groups for vertical and horizontal context areas and focus area - f.gp_ctx_y = rgp.append("g").attr("transform", "translate(" + f.ul_ctx_y.x + ", " + f.ul_ctx_y.y + ")"); f.gp_ctx_x = rgp.append("g").attr("transform", "translate(" + f.ul_ctx_x.x + ", " + f.ul_ctx_x.y + ")"); + f.gp_ctx_x.append("rect").attr("width", f.sz_ctx_x.x).attr("height", f.sz_ctx_x.y).classed("tv-data-bg", true); + f.gp_fcs = rgp.append("g").attr("transform", "translate(" + f.ul_fcs.x + ", " + f.ul_fcs.y + ")"); f.gp_fcs.on("mousewheel", f.mouse_scroll); f.gp_fcs.append("rect").attr("width", f.sz_fcs.x).attr("height", f.sz_fcs.y).classed("tv-data-bg", true); - 
f.gp_ctx_x.append("rect").attr("width", f.sz_ctx_x.x).attr("height", f.sz_ctx_x.y).classed("tv-data-bg", true); - f.gp_ctx_y.append("rect").attr("width", f.sz_ctx_y.x).attr("height", f.sz_ctx_y.y).classed("tv-data-bg", true); + // the plotted time series in the focus and x ctx area are subject to a clipping region new_clip_path(rgp, "fig-lines-clip").append("rect").attr("width", f.sz_fcs.x).attr("height", f.sz_fcs.y); - new_clip_path(rgp, "fig-ctx-x-clip").append("rect").attr("width", f.sz_ctx_x.x).attr("height", f.sz_ctx_x.y); + // new_clip_path(rgp, "fig-ctx-x-clip").append("rect").attr("width", f.sz_ctx_x.x).attr("height", f.sz_ctx_x.y); // group with clip path applied for the focus lines f.gp_lines = f.gp_fcs.append("g").attr("style", "clip-path: url(#fig-lines-clip)") .append("g").classed("line-plot", true); // scales for vertical and horizontal context, and the x and y axis of the focus area - f.sc_ctx_y = d3.scale.linear().domain([-1, f.shape()[2]]).range([f.sz_ctx_y.y, 0]); - f.sc_ctx_x = d3.scale.linear().domain([f.t0(), f.t0() + f.dt() * f.shape()[0]]).range([0, f.sz_ctx_x.x]); - f.sc_fcs_x = d3.scale.linear().domain([f.t0(), f.t0() + f.dt() * f.shape()[0]]).range([0, f.sz_fcs.x]); - f.sc_fcs_y = d3.scale.linear().domain([-1, f.shape()[2] + 1]).range([f.sz_fcs.y, 0]); + f.sc_ctx_y = d3.scaleLinear().domain([-1, f.shape()[2]]).range([f.sz_ctx_y.y, 0]); + f.sc_ctx_x = d3.scaleLinear().domain([f.t0(), f.t0() + f.dt() * f.shape()[0]]).range([0, f.sz_ctx_x.x]); + f.sc_fcs_x = d3.scaleLinear().domain([f.t0(), f.t0() + f.dt() * f.shape()[0]]).range([0, f.sz_fcs.x]); + f.sc_fcs_y = d3.scaleLinear().domain([-1, f.shape()[2] + 1]).range([f.sz_fcs.y, 0]); + + + f.dom_x = f.sc_ctx_x.domain(); + f.dom_y = f.sc_ctx_y.domain(); // axes for each of the above scales - f.ax_ctx_y = d3.svg.axis().orient("left").scale(f.sc_ctx_y); - f.ax_ctx_x = d3.svg.axis().orient("bottom").scale(f.sc_ctx_x); - f.ax_fcs_x = d3.svg.axis().orient("top").scale(f.sc_fcs_x); - f.ax_fcs_y = 
d3.svg.axis().orient("left").scale(f.sc_fcs_y); + f.ax_ctx_x = d3.axisBottom(f.sc_ctx_x); + f.ax_fcs_x = d3.axisTop(f.sc_fcs_x); + f.ax_fcs_y = d3.axisLeft(f.sc_fcs_y); f.ax_fcs_y.tickFormat(f.signal_tick_labeler); - f.ax_ctx_y.tickFormat(f.signal_tick_labeler); // groups for each of the above axes - f.gp_ax_ctx_y = f.gp_ctx_y.append("g").classed("axis", true).call(f.ax_ctx_y); f.gp_ax_ctx_x = f.gp_ctx_x.append("g").classed("axis", true).call(f.ax_ctx_x) .attr("transform", "translate(0, " + f.sz_ctx_x.y + ")"); f.gp_ax_fcs_x = f.gp_fcs.append("g").classed("axis", true).call(f.ax_fcs_x); @@ -768,6 +823,7 @@ tv.plot = { da_lines[sig_idx][tt_idx] *= -1; } + da_lines[sig_idx] = {sig: da_lines[sig_idx], id: sig_idx}; } @@ -799,7 +855,7 @@ tv.plot = { } } - // scale average siganl by ptp + // scale average signal by ptp var _dar = new tv.ndar(da_x); var da_max = _dar.max() , da_min = _dar.min() @@ -844,7 +900,10 @@ tv.plot = { return d.id; }); + if (!f.we_are_setup) { + + f.line_paths = g.enter() .append("g") .attr("transform", function (d, i) { @@ -854,76 +913,26 @@ tv.plot = { .attr("vector-effect", "non-scaling-stroke"); } - g.select("path").attr("d", function (d) { - return d3.svg.line() - .x(function (dd, i) { + + f.line_paths.attr("d", function (d) { + return d3.line() + .x(function (d, i) { return f.sc_ctx_x(ts.data[i]); }) - .y(function (dd) { - return dd; + .y(function (d) { + return d; }) (d.sig); }); + + }; f.render_contexts = function () { - var ts = f.ts(); + // originally used to draw context lines and average + - // horizontal context line - var f1 = f.gp_ctx_x.append("g").attr("style", "clip-path: url(#fig-ctx-x-clip)"); - var f2 = f1.selectAll("g").data([f.da_x]).enter(); - var f3 = f2.append("g") - .attr("transform", function () { - return "translate(0, " + (f.sz_ctx_x.y / 2) + ") scale(1, 0.5)"; - }) - .classed("tv-ctx-line", true); - var f4 = f3.append("path") - .attr("d", d3.svg.line() - .x(function (d, i) { - var time_start = f.sc_ctx_x.domain()[0]; 
- return f.sc_ctx_x((time_start + i + 0.5) * f.da_x_dt); - }) - .y(function (d) { - return d * f.sz_ctx_x.y; - })); - - // error on context line - // TODO the data for this path needs to be re done so that it traces above and below - // the mean line. - var da_x_len = f.da_x.length; - - f.gp_ctx_x.append("g").attr("style", "clip-path: url(#fig-ctx-x-clip)") - .selectAll("g").data([f.da_x.concat(f.da_x.slice().reverse())]) - .enter() - .append("g").attr("transform", "translate(0, " + f.sz_ctx_x.y / 2 + ") scale(1, 0.5)") - .classed("tv-ctx-error", true) - .append("path") - .attr("d", d3.svg.line() - .x(function (d, i) { - var idx = (i < da_x_len) ? i : (2 * da_x_len - i); - var time_start = f.sc_ctx_x.domain()[0]; - return f.sc_ctx_x((time_start + idx) * f.da_x_dt); - }) - .y(function (d, i) { - var std = (i < da_x_len) ? f.da_xs[i] : -f.da_xs[2 * da_x_len - i - 1]; - return f.sz_ctx_x.y * (d + std); - })); - - // vertical context lines - f.gp_ctx_y.append("g").selectAll("g").data(f.da_y) - .enter() - .append("g").attr("transform", function (d, i) { - return "translate(0, " + f.sc_ctx_y(i) + ")"; - }) - .classed("tv-ctx-line", true) - .append("path") - .attr("d", d3.svg.line().x(function (d, i) { - return 2 + (f.sz_ctx_y.x - 2) * i / f.sz_ctx_y.x; - }) - .y(function (d) { - return d; - })); }; f.scale_focus_stroke = function () { @@ -943,94 +952,252 @@ tv.plot = { } }; + f.add_brushes = function () { // horizontal context brush var br_ctx_x_fn = function () { - var dom = f.br_ctx_x.empty() ? 
f.sc_ctx_x.domain() : f.br_ctx_x.extent() - , sc = f.sc_fcs_x - , x_scaling = f.sc_ctx_x.domain()[1] / (dom[1] - dom[0]); + var event_selection_x = []; + // Different extent when it is: + //1.from the brush of 2D Focus Brush + if (d3.event.selection != null && d3.event.selection[0][0] != null) { + event_selection_x[0] = d3.event.selection[0][0]; + event_selection_x[1] = d3.event.selection[1][0]; + } + //2.from the end of focus brush + else if (d3.event.selection == null) { + event_selection_x = [f.sc_ctx_x.range()[0], f.sc_ctx_x.range()[1]]; + f.dom_x = [f.t0(), f.t0() + f.dt() * f.shape()[0]]; + } + //3.from itself + else { + event_selection_x = d3.event.selection; + } + + + var scale_brushed = d3.scaleLinear().domain(f.dom_x).range(f.sc_ctx_x.range()); + + + //selection is now in coordinates and we have to map it using scales + event_selection_x = event_selection_x.map(scale_brushed.invert, scale_brushed); + + + dom = f.br_ctx_x === null ? f.sc_ctx_x.domain() : event_selection_x; + + f.dom_x = dom; + + sc = f.sc_fcs_x; + x_scaling = scale_brushed.domain()[1] / (dom[1] - dom[0]); sc.domain(dom); + f.sc_ctx_x.domain(dom); f.gp_ax_fcs_x.call(f.ax_fcs_x); + f.gp_ax_ctx_x.call(f.ax_ctx_x); + + // TODO: This seems to cause problems with negative values and commenting it out does not seem to // cause any additional problems. This could do with some double checking. f.gp_lines.attr("transform", "translate(" + sc(0) + ", 0) scale(" + x_scaling + ", 1)"); } - // vertical context brush + // vertical changes , br_ctx_y_fn = function () { - var dom = f.br_ctx_y.empty() ? 
f.sc_ctx_y.domain() : f.br_ctx_y.extent(); + + var event_selection_y = []; + + if (d3.event == null || d3.event.selection == null) { + event_selection_y = f.sc_ctx_y.range(); + f.dom_y = [-1, f.shape()[2]]; + } + else if (d3.event.selection != null && d3.event.selection[0][0] != null) { + event_selection_y[1] = d3.event.selection[0][1]; + event_selection_y[0] = d3.event.selection[1][1]; + } + else { + event_selection_y[0] = d3.event.selection[1]; + event_selection_y[1] = d3.event.selection[0]; + } + + var scale_brushed = d3.scaleLinear().domain(f.dom_y).range(f.sc_ctx_y.range()); + + + event_selection_y = event_selection_y.map(scale_brushed.invert, scale_brushed); + var dom = f.br_ctx_y === null ? f.sc_ctx_y.domain() : event_selection_y; + f.dom_y = dom; var yscl = f.sz_fcs.y / (dom[1] - dom[0]) / 5; - f.sc_fcs_y.domain(dom); + f.sc_fcs_y.domain(dom).range([f.sz_ctx_y.y, 0]); f.gp_ax_fcs_y.call(f.ax_fcs_y); f.gp_lines.selectAll("g").attr("transform", function (d, i) { return "translate(0, " + f.sc_fcs_y(i) + ")" + "scale (1, " + yscl + ")" }).selectAll("path").attr("stroke-width", "" + (3 / yscl)); - } - , br_ctx_end = function () { - f.scale_focus_stroke(); - // TODO: This f.render() only makes another call to the server bringing back data that is already here. - // Also when rerendering that data it is rendered from the 0 timeline, so if for example you - // zoom on a block from 250-270, 20 points or data ar brought from the server and the plot is - // redrawn with data from 0-20, but you being in the area 250-270 don't see any of it. This needs - // to be tested further to see if it was really needed, and if so look into the problem of zooming - // described above. 
- - // f.render(); }; f.br_ctx_y_fn = br_ctx_y_fn; + br_ctx_end = function () { + + //get the selected time range + var event_selection_x = []; + if (d3.event.selection != null) { + event_selection_x[0] = d3.event.selection[0]; + event_selection_x[1] = d3.event.selection[1]; + selection_x = event_selection_x; + } + event_selection_x = event_selection_x.map(f.sc_ctx_x.invert); + timeselection = event_selection_x; + + // remove the last time's selection + f.gp_ctx_x.selectAll(".selected-time").remove(); + + //change the actual time point in the slider + if (d3.event.selection != null) { + f.timeselection_update_fn(triggered_by_timeselection) + } + + + }; + // on end of focus brush // this is on f so that f can call it when everything else is done.. f.br_fcs_endfn = function (no_render) { - + if (!d3.event || !d3.event.sourceEvent) { + br_ctx_y_fn(); + f.scale_focus_stroke(); + return; + } br_ctx_x_fn(); br_ctx_y_fn(); + f.gp_br_fcs.node().__brush.selection = null; + f.gp_br_fcs.call(f.br_fcs); f.scale_focus_stroke(); - f.gp_br_fcs.call(f.br_fcs.clear()); - if (!no_render) { - // TODO: This f.render() only makes another call to the server bringing back data that is already here. - // Also when rerendering that data it is rendered from the 0 timeline, so if for example you - // zoom on a block from 250-270, 20 points or data ar brought from the server and the plot is - // redrawn with data from 0-20, but you being in the area 250-270 don't see any of it. This needs - // to be tested further to see if it was really needed, and if so look into the problem of zooming - // described above. 
- //f.render(); - } }; - // focus brush - f.br_fcs_brush = function () { - var ex = f.br_fcs.extent(); - f.br_ctx_x.extent([ex[0][0], ex[1][0]]); - f.br_ctx_y.extent([ex[0][1], ex[1][1]]); - f.gp_br_ctx_y.call(f.br_ctx_y); - f.gp_br_ctx_x.call(f.br_ctx_x); - }; - // create brushes - f.br_ctx_x = d3.svg.brush().x(f.sc_ctx_x).on("brush", br_ctx_x_fn) - .on("brushend", br_ctx_end); + f.br_fcs_startfn = function () { + // we will use the left upper of the brush to do a tooltip + + //select a channel + var event_selection_y = []; + event_selection_y[1] = d3.event.selection[0][1]; + event_selection_y = event_selection_y.map(f.sc_ctx_y.invert); + + //choose the time point + var event_selection_x = []; + event_selection_x[1] = d3.event.selection[0][0]; + event_selection_x = event_selection_x.map(f.sc_ctx_x.invert); + if (event_selection_x[1] < 0) { + event_selection_x[1] = 0 + f.da_x; + } + - f.br_ctx_y = d3.svg.brush().y(f.sc_ctx_y).on("brush", br_ctx_y_fn) - .on("brushend", br_ctx_end); + timerange = f.sc_fcs_x.domain()[1]; + channelID = parseInt(event_selection_y[1]); + timepoint_length = f.da_lines[channelID].sig.length; - f.br_fcs = d3.svg.brush().x(f.sc_fcs_x) - .y(f.sc_fcs_y).on("brushend", f.br_fcs_endfn) + timepoint = event_selection_x[1] / f.sc_fcs_x.domain()[1]; + timepoint = timepoint * timepoint_length; + timepoint = parseInt(timepoint); + + valuearray = f.ys().data; + channel_number = f.channels().length; + channel_index = f.channels().indexOf(channelID); + + //print out the channel name(label) and value + $("#info-channel").html(' ' + f.labels()[parseInt(event_selection_y[1])]); + $("#info-time").html(" " + timepoint); + $("#info-value").html(" " + valuearray[channel_number * timepoint + channel_index]); + + } + + + // create brushes + f.br_ctx_x = d3.brushX().extent([[f.sc_ctx_x.range()[0], 0], [f.sc_ctx_x.range()[1], f.sz_ctx_x.y]]).on("end", br_ctx_end); + f.br_fcs = d3.brush().extent([[f.sc_fcs_x.range()[0], 0], [f.sc_fcs_x.range()[1], f.sz_fcs.y]]) + 
.on("end", f.br_fcs_endfn).on("start", f.br_fcs_startfn) .on("brush", f.br_fcs_brush); - // add brush groups and add brushes to them - f.gp_br_ctx_y = f.gp_ctx_y.append("g"); + // add time selection brush group f.gp_br_ctx_x = f.gp_ctx_x.append("g"); + //add title for the time selection area + f.timeselection_title = f.gp_br_ctx_x.append("text").text("Time Selection").attr("y", -10); + f.gp_br_ctx_x.classed("brush", true).attr("class", "time-selection-brush").call(f.br_ctx_x).selectAll("rect").attr("height", f.sz_ctx_x.y); + + + //add main focus brush group f.gp_br_fcs = f.gp_fcs.append("g").classed("brush", true).call(f.br_fcs); - f.gp_br_ctx_y.append("g").classed("brush", true).call(f.br_ctx_y).selectAll("rect").attr("width", f.sz_ctx_y.x); - f.gp_br_ctx_x.append("g").classed("brush", true).call(f.br_ctx_x).selectAll("rect").attr("height", f.sz_ctx_x.y); + + }; + + + //functions for the time selection window + f.timeselection_update_fn = function (triggered) { + + //display the selected time range + f.text_timeselection_range = f.gp_ctx_x.append("text").attr("class", "selected-time").attr("id", "time-selection") + .text("Selected Time Range: " + timeselection[0].toFixed(2) + "ms" + " to " + timeselection[1].toFixed(2) + "ms"); + f.text_interval = f.gp_ctx_x.append("text").attr("class", "selected-time").attr("id", "time-selection-interval") + .text(" Interval:" + (timeselection[1] - timeselection[0]).toFixed(2) + " ms").attr("x", 100).attr("y", -10); + + if (triggered) { + timeselection_interval = timeselection[1] - timeselection[0]; + timeselection_interval_length = parseInt(timeselection_interval / f.dt()) - 1; + //call the energy computation method + tv.util.get_time_selection_energy(f.baseURL(), f.current_slice(), f.energy_callback, f.channels(), f.mode(), f.state_var(), timeselection_interval_length); + //update the time in the input tag + d3.select("#TimeNow").property('value', timeselection[0].toFixed(2)); + //update the time in the 3d viewer's time + var 
time_index=parseInt((timeselection[0]-f.t0())/f.dt()); + $('#slider').slider('value', time_index); + loadFromTimeStep(parseInt(timeselection[0])); + } + }; + + //move the time selection window with the slider + f.timeselection_move_fn = function () { + redrawSelection() + }; + + + //TODO need to fix one additional step brought by any change + function redrawSelection() { + if (parseInt(timeselection[1]) == parseInt(f.sc_ctx_x.domain()[1])) { + f.jump_to_next_time_range() + } + if (timeStepsPerTick > 1) { + d3.select(f.gp_br_ctx_x.node()).call(f.br_ctx_x.move, [timeselection[0] + f.dt() * timeStepsPerTick, timeselection[1] + f.dt() * timeStepsPerTick].map(f.sc_ctx_x)); + } + else if (timeStepsPerTick < 1) { + d3.select(f.gp_br_ctx_x.node()).call(f.br_ctx_x.move, [timeselection[0] + f.dt() * 1 / (1 / timeStepsPerTick + 1), timeselection[1] + f.dt() * 1 / (1 / timeStepsPerTick + 1)].map(f.sc_ctx_x)); + } + else if (timeStepsPerTick === 1) { + d3.select(f.gp_br_ctx_x.node()).call(f.br_ctx_x.move, [timeselection[0] + f.dt(), timeselection[1] + f.dt()].map(f.sc_ctx_x)); + } + } + + f.jump_to_next_time_range = function(){ + var time_data_length=f.shape()[0]; + var current_slice_length=f.current_slice()[0].hi-f.current_slice()[0].lo; + if(f.current_slice()[0].hi+current_slice_length - + diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js deleted file mode 100644 index 8eec6f4c5..000000000 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseriesD3.js +++ /dev/null @@ -1,969 +0,0 @@ -/** - * TheVirtualBrain-Framework Package. This package holds all Data Management, and - * Web-UI helpful to run brain-simulations. To use it, you also need do download - * TheVirtualBrain-Scientific Package (for simulators). See content of the - * documentation-folder for more details. 
See also http://www.thevirtualbrain.org - * - * (c) 2012-2017, Baycrest Centre for Geriatric Care ("Baycrest") and others - * - * This program is free software: you can redistribute it and/or modify it under the - * terms of the GNU General Public License as published by the Free Software Foundation, - * either version 3 of the License, or (at your option) any later version. - * This program is distributed in the hope that it will be useful, but WITHOUT ANY - * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A - * PARTICULAR PURPOSE. See the GNU General Public License for more details. - * You should have received a copy of the GNU General Public License along with this - * program. If not, see . - * - **/ - -/* - - tv.js should dump just a single public var named tv (T.VB V.isualizations) - - tv = {} - - with - - tv.ndar array fun - tv.plot reusable plotting components - tv.util utility stuff - - */ - -/* global tv, d3 */ - -//added globals for time selection -var timeselection_interval_length = 0;//integer -var timeselection_interval = 0; -var timeselection = []; - -// identify the initiator of the change of the time selection: brushing or movie timeline -var triggered_by_timeselection = true; -//store the unmapped selection value used to animate the time selection window -var selection_x = []; - -//store the energy calculated from the time selection -var timeselection_energy = []; - -tv = {}; - -tv.util = { - - // d3 style configurator. 
if this is slow, interp and eval source - gen_access: function (obj, field) { - return function (maybe) { - if (maybe === undefined) { - return obj["_" + field]; - } else { - obj["_" + field] = maybe; - return obj; - } - }; - }, - - // helper to add usage notes to plots - usage: function (root, heading, notes) { - const p = root.append("p"); - p.classed("slice-info", true); - p.append("h3").classed("instructions", true).text(heading); - p.append("ul").selectAll("li").data(notes) - .enter().append("li").classed("instructions", true).text(function (d) { - return d; - }); - }, - - ord_nums: ["zeroeth", "first", "second", "third", "fourth", "fifth", "sixth", "seventh", "eighth", "ninth", "tenth", - "eleventh", "twelfth", "thirteenth", "fourteenth", "fifteenth", "sixteenth", "seventeenth", "eighteenth", "nineteenth"], - - /* f is a templater/formatter cf. https://gist.github.com/984375 */ - fmt: function (f) { // fhe format specifier followed by any number of arguments - - var a = arguments; // store outer arguments - return ("" + f) // force format specifier to String - .replace( // replace tokens in format specifier - /\{(?:(\d+)|(\w+))\}/g, // match {token} references - function (s, // the matched string (ignored) - i, // an argument index - p // a property name - ) { - return p && a[1] // if property name and first argument exist - ? a[1][p] // return property from first argument - : a[i]; // assume argument index and return i-th argument - }); - }, - - get_array_shape: function (baseURL, callback) { - $.getJSON(baseURL + "/read_data_shape/False?kwd=0", callback); - }, - - get_array_slice: function (baseURL, slices, callback, channels, currentMode, currentStateVar) { - var readDataURL = readDataChannelURL(baseURL, slices[0].lo, slices[0].hi, - currentStateVar, currentMode, slices[0].di, JSON.stringify(channels)); - //NOTE: If we need to add slices for the other dimensions pass them as the 'specific_slices' parameter. - // Method called is from time_series.py. 
- $.getJSON(readDataURL, callback); - }, - - get_time_selection_energy: function (baseURL, slices, callback, channels, currentMode, currentStateVar, timeselectionlLength) { - var readDataURL = readDataEnergyURL(baseURL, slices[0].lo, slices[0].hi, - currentStateVar, currentMode, slices[0].di, JSON.stringify(channels), timeselectionlLength); - //NOTE: If we need to add slices for the other dimensions pass them as the 'specific_slices' parameter. - // Method called is from time_series.py. - $.getJSON(readDataURL, callback) - } -}; - -tv.ndar = function (data) { - - this.data = data; - - this.imap = function (f) { - for (var i = 0; i < this.data.length; i++) { - this.data[i] = f(this.data[i]); - } - return this; - }; - - this.map = function (f) { - return (new tv.ndar(this.data.slice())).imap(f); - }; - - this.reduce = function (f, init) { - for (var i = 0; i < this.data.length; i++) { - init = f(init, this.data[i]); - } - return init; - }; - - this.max = function () { - return this.reduce(function (l, r) { - return l > r ? l : r; - }, -1e300); - }; - - this.min = function () { - return this.reduce(function (l, r) { - return l < r ? 
l : r; - }, 1e300); - }; - - this.sum = function () { - return this.reduce(function (l, r) { - return l + r; - }, 0); - }; - - this.mean = function () { - return this.sum() / this.length(); - }; - - this.std = function () { - var mean_sqr = this.map(function (x) { - return x * x; - }).mean(), - mean = this.mean(); - return Math.sqrt(mean_sqr - mean * mean); - }; - - this.add = function (b) { - return this.map(function (x) { - return x + b; - }); - }; - - this.sub = function (b) { - return this.add(-b); - }; - - this.mul = function (b) { - return this.map(function (x) { - return x * b; - }); - }; - - this.imul = function (b) { - return this.imap(function (x) { - return x * b; - }); - }; - - this.idiv = function (b) { - return this.imul(1 / b); - }; - - this.div = function (b) { - return this.mul(1 / b); - }; - - this.get = function (i) { - return this.data[i]; - }; - - this.set = function (i, val) { - this.data[i] = val; - }; - - this.nd2lin = function (idx) { - var l = 0; - for (var i = 0; i < idx.length; i++) { - l += this.strides[i] * idx[i]; - } - return l; - }; - - this.length = function () { - return this.data.length; - }; - - // return indices where condition is true - this.where = function (f) { - var indices = []; - for (var i = 0; i < this.data.length; i++) { - if (f(this.data[i], i)) { - indices.push(i); - } - } - return indices; - }; - - this.pretty_step = function (base) { - return Math.pow(base, Math.floor(-1 + Math.log(this.max() - this.min()) / Math.log(base))); - }; - - this.pretty_ticks = function (base) { - var d = this.pretty_step(base || 10), f = Math.floor; - return tv.ndar.range(f(this.min() / d) * d, (f(this.max() / d) + 1) * d, d); - }; - - this.pretty_ticklabels = function (base) { - return this.pretty_ticks(base).map(function (d) { - return d.toPrecision(2); - }); - }; - - this.normalized = function () { - var mn = this.min(), mx = this.max(); - return this.map(function (d) { - return (d - mn) / (mx - mn); - }); - }; - - this.slice = 
function (lo, hi) { - return tv.ndar.from(this.data.slice(lo, hi)); - }; - -}; - -tv.ndar.from = function (src) { - return new tv.ndar(src); -}; - -tv.ndar.ndfrom = function (src) { - var a = tv.ndar.from(src.data); - a.shape = src.shape; - a.strides = src.strides; - return a; -}; - -tv.ndar.range = function (a, b, c) { - var lo, hi, dx; - - if ((a || a === 0) && b) { - if (c) { - dx = c; - } - else { - dx = 1; - } - lo = a; - hi = b; - } else { - hi = a; - lo = 0; - dx = 1; - } - - var end = Math.floor((hi - lo) / dx); - var ar = new tv.ndar([]); - for (var i = 0; i < end; i++) { - ar.data[i] = dx * i + lo; - } - return ar; - -}; - -tv.ndar.zeros = function (n) { - return tv.ndar.range(n).imap(function () { - return 0.0; - }); -}; - -tv.ndar.ones = function (n) { - return tv.ndar.zeros(n).add(1.0); -}; - - -tv.plot = { - - time_series: function () { - - var f = function (root) { - - f.p(f.p() || 0.1); // pad - f.w(f.w() || 700); - f.h(f.h() || 500); - f.point_limit(f.point_limit() || 500); - - f.magic_fcs_amp_scl = 1; - - // make sure we got numbers not strings - f.dt(+f.dt()); - f.t0(+f.t0()); - - // Create the required UI elements. 
- var svg = root.append("svg").attr("width", f.w()).attr("height", f.h()); - var rgp = svg.append("g").attr("transform", "scale(1, 1)"); - - rgp.append("g").append("rect").attr("width", f.w()).attr("height", f.h()).classed("tv-fig-bg", true); - - f.status_line = svg.append("g").attr("transform", "translate(10, " + (f.h() - 10) + ")").append("text"); - - // parts independent of data - f.compute_layout(); - f.add_resizer(svg, rgp); - f.do_scaffolding(rgp); - - // inversion of flow control in progress - f.we_are_setup = false; - f.render(); - }; // end function f() - - f.energy_callback = function (data) { - timeselection_energy = data; - changeCubicalMeasurePoints_energy(); - }; - - f.render = function () { - f.status_line.text("waiting for data from server..."); - tv.util.get_array_slice(f.baseURL(), f.current_slice(), f.render_callback, f.channels(), f.mode(), f.state_var()); - }; - - f.render_callback = function (data) { - - var kwd = kwd || {}; - - f.status_line.text("handling data..."); - - /* reformat data into normal ndar style */ - var flat = [] - , sl = f.current_slice()[0] - , shape = [(sl.hi - sl.lo) / sl.di, f.shape()[2]] - , strides = [f.shape()[2], 1]; - - for (var i = 0; i < shape[0]; i++) { - for (var j = 0; j < shape[1]; j++) { - flat.push(data[i][j]); - } - } - - var ts = [], t0 = f.t0(), dt = f.dt(); - - for (var ii = 0; ii < shape[0]; ii++) { - ts.push(t0 + dt * sl.lo + ii * dt * sl.di); - } - - f.ts(tv.ndar.ndfrom({data: ts, shape: [shape[0]], strides: [1]})); - f.ys(tv.ndar.ndfrom({data: flat, shape: shape, strides: strides})); - - f.status_line.text("examining data..."); - f.prepare_data(); - f.status_line.text("rendering data..."); - f.render_focus(); - - if (!f.we_are_setup) { - f.render_contexts(); - f.add_brushes(); - f.br_fcs_endfn(true); // no_render=true - f.we_are_setup = true; - } - - f.status_line.text(""); - }; - - f.current_slice = function () { - var dom = f.sc_fcs_x.domain() - , lo = Math.floor((dom[0] - f.t0()) / f.dt()) - , hi = 
Math.floor((dom[1] - f.t0()) / f.dt()) - , di = Math.floor((hi - lo) / (2 * f.point_limit())); - - di = di === 0 ? 1 : di; - - if (lo > f.shape()[0]) { - console.log("time_series.current_slice(): found lo>shape[0]: " + lo + ">" + f.shape()[0]); - lo = f.shape()[0]; - } - - return [{lo: lo, hi: hi, di: di}]; - }; - - - // dimensions and placement of focus and context areas - f.compute_layout = function () { - // pad is only provisionally basis for dimensioning the context areas; later - // we will need to have inner and outer pad - f.pad = {x: (0 ? f.w() : f.h()) * f.p(), y: f.h() * f.p()}; - f.ul_ctx_y = {x: f.pad.x, y: f.pad.y}; - f.sz_ctx_y = {x: f.pad.x * 0.8, y: f.h() - 3 * f.pad.y - f.pad.y}; - f.ul_ctx_x = {x: f.pad.x, y: 2 * f.pad.y + f.sz_ctx_y.y}; - f.sz_ctx_x = {x: f.w() - 2 * f.pad.x, y: f.pad.y / 2}; - f.ul_fcs = {x: f.ul_ctx_x.x, y: f.ul_ctx_y.y}; - f.sz_fcs = {x: f.sz_ctx_x.x, y: f.sz_ctx_y.y}; - - }; - - // allows user to scale plot size dynamically - // TODO refactor place in tv.util - f.add_resizer = function (svg, rgp) { - - var resize_start; - - rgp.append("g").append("rect").classed("tv-resizer", true) - .on("mouseover", function () { - rgp.attr("style", "cursor: se-resize"); - }) - .on("mouseout", function () { - rgp.attr("style", ""); - }) - .attr("x", f.w() - f.pad.x / 2).attr("y", f.h() - f.pad.y / 2) - .attr("width", f.pad.x / 2).attr("height", f.pad.y / 2) - .call(d3.drag().on("drag", function () { - var p1 = d3.mouse(svg.node()) - , p2 = resize_start - , scl = {x: p1[0] / p2[0], y: p1[1] / p2[1]}; - rgp.attr("transform", "scale(" + scl.x + ", " + scl.y + ")"); - svg.attr("width", scl.x * f.w()).attr("height", scl.y * f.h()); - }).on("start", function () { - resize_start = d3.mouse(rgp.node()); - })); - }; - - // TODO migrate to tv.util - var new_clip_path = function (el, id) { - return el.append("defs").append("clipPath").attr("id", id); - }; - - f.mouse_scroll = function () { - var ev = window.event - , da = ev.detail ? 
ev.detail : ev.wheelDelta - , sh = ev.shiftKey - , dr = !!(da > 0); - - if (sh) { - f.magic_fcs_amp_scl *= dr ? 1.2 : 1 / 1.2; - // TODO scale transform instead via direct access... - f.prepare_data(); - f.render_focus(); - } else { - if (!(f.gp_br_fcs.node().__brush === null)) { - var dx = dr ? 1 : -1; - // stop scrolling if it is the end of the signals' list - if (f.dom_y[0] >= -1 && f.dom_y[1] <= f.channels().length) { - f.dom_y[0] += dx; - f.dom_y[1] += dx; - } - //lower bound - else if (f.dom_y[0] < -1) { - var delta = Math.abs(f.dom_y[0] - (-1)); - f.dom_y[0] += delta; - f.dom_y[1] += delta; - } - //upper bound - else if (f.dom_y[1] > f.channels().length) { - var delta = Math.abs(f.channels().length - f.dom_y[1]); - f.dom_y[0] -= delta; - f.dom_y[1] -= delta; - } - - //redraw the lines - var dom = f.dom_y; - var yscl = f.sz_fcs.y / (dom[1] - dom[0]) / 5; - f.sc_fcs_y.domain(dom).range([f.sz_ctx_y.y, 0]); - f.gp_ax_fcs_y.call(f.ax_fcs_y); - f.gp_lines.selectAll("g").attr("transform", function (d, i) { - return "translate(0, " + f.sc_fcs_y(i) + ")" + "scale (1, " + yscl + ")" - }).selectAll("path").attr("stroke-width", "" + (3 / yscl)); - f.scale_focus_stroke(); - - - } - - - } - - - }; - - f.signal_tick_labeler = function (tick_value) { - return (tick_value % 1 === 0) ? 
f.labels()[tick_value] : ""; - }; - - // setup groups, scales and axes for context and focus areas - f.do_scaffolding = function (rgp) { - - // main groups for vertical and horizontal context areas and focus area - f.gp_ctx_x = rgp.append("g").attr("transform", "translate(" + f.ul_ctx_x.x + ", " + f.ul_ctx_x.y + ")"); - f.gp_ctx_x.append("rect").attr("width", f.sz_ctx_x.x).attr("height", f.sz_ctx_x.y).classed("tv-data-bg", true); - - f.gp_fcs = rgp.append("g").attr("transform", "translate(" + f.ul_fcs.x + ", " + f.ul_fcs.y + ")"); - f.gp_fcs.on("mousewheel", f.mouse_scroll); - f.gp_fcs.append("rect").attr("width", f.sz_fcs.x).attr("height", f.sz_fcs.y).classed("tv-data-bg", true); - - - // the plotted time series in the focus and x ctx area are subject to a clipping region - new_clip_path(rgp, "fig-lines-clip").append("rect").attr("width", f.sz_fcs.x).attr("height", f.sz_fcs.y); - // new_clip_path(rgp, "fig-ctx-x-clip").append("rect").attr("width", f.sz_ctx_x.x).attr("height", f.sz_ctx_x.y); - - // group with clip path applied for the focus lines - f.gp_lines = f.gp_fcs.append("g").attr("style", "clip-path: url(#fig-lines-clip)") - .append("g").classed("line-plot", true); - - // scales for vertical and horizontal context, and the x and y axis of the focus area - f.sc_ctx_y = d3.scaleLinear().domain([-1, f.shape()[2]]).range([f.sz_ctx_y.y, 0]); - f.sc_ctx_x = d3.scaleLinear().domain([f.t0(), f.t0() + f.dt() * f.shape()[0]]).range([0, f.sz_ctx_x.x]); - f.sc_fcs_x = d3.scaleLinear().domain([f.t0(), f.t0() + f.dt() * f.shape()[0]]).range([0, f.sz_fcs.x]); - f.sc_fcs_y = d3.scaleLinear().domain([-1, f.shape()[2] + 1]).range([f.sz_fcs.y, 0]); - - - f.dom_x = f.sc_ctx_x.domain(); - f.dom_y = f.sc_ctx_y.domain(); - - // axes for each of the above scales - f.ax_ctx_x = d3.axisBottom(f.sc_ctx_x); - f.ax_fcs_x = d3.axisTop(f.sc_fcs_x); - f.ax_fcs_y = d3.axisLeft(f.sc_fcs_y); - - f.ax_fcs_y.tickFormat(f.signal_tick_labeler); - - // groups for each of the above axes - 
f.gp_ax_ctx_x = f.gp_ctx_x.append("g").classed("axis", true).call(f.ax_ctx_x) - .attr("transform", "translate(0, " + f.sz_ctx_x.y + ")"); - f.gp_ax_fcs_x = f.gp_fcs.append("g").classed("axis", true).call(f.ax_fcs_x); - f.gp_ax_fcs_y = f.gp_fcs.append("g").classed("axis", true).call(f.ax_fcs_y); - - }; - - f.prepare_data = function () { - - var ts = f.ts(); - var ys = f.ys(); - var da_lines = []; - var line_avg; - var ys_std = ys.min(); - //To set this properly, we need to know: - // nsig - how many signals on the screen? - // std - std of signals - // pxav - vertical pixels available - - for (var sig_idx = 0; sig_idx < ys.shape[1]; sig_idx++) { - - da_lines[sig_idx] = []; - for (var t_idx = 0; t_idx < ys.shape[0]; t_idx++) { - da_lines[sig_idx][t_idx] = ys.data[ys.strides[0] * t_idx + sig_idx]; - } - - line_avg = d3.mean(da_lines[sig_idx]); - for (var tt_idx = 0; tt_idx < ys.shape[0]; tt_idx++) { - da_lines[sig_idx][tt_idx] = f.magic_fcs_amp_scl * (da_lines[sig_idx][tt_idx] - line_avg) / Math.abs(ys_std); - // multiply by -1 because the y axis points down - da_lines[sig_idx][tt_idx] *= -1; - } - - - da_lines[sig_idx] = {sig: da_lines[sig_idx], id: sig_idx}; - } - - // compute context data - var da_x = [] - , da_xs = [] - , da_y = [] - , ys_mean = ys.mean() - , ys_std = ys.std() - , n_chan = ys.shape[1] - , datum; - - // center an average signal - for (var j = 0; j < ts.shape[0]; j++) { - da_x[j] = 0; - da_xs[j] = 0; - for (var i = 0; i < n_chan; i++) { - datum = ys.data[j * n_chan + i]; - da_x [j] += datum; - da_xs[j] += datum * datum; - } - da_xs[j] = Math.sqrt(da_xs[j] / n_chan - ((da_x[j] / n_chan) * (da_x[j] / n_chan))); - da_x [j] = (da_x[j] / n_chan - ys_mean); - // multiply by -1 because y axis points down - da_x[j] *= -1; - - if ((isNaN(da_x[j])) || (isNaN(da_xs[j]))) { - console.log("encountered NaN in data: da_x[" + j + "] = " + da_x[j] + ", da_xs[" + j + "] = " + da_xs[j] + "."); - } - } - - // scale average signal by ptp - var _dar = new tv.ndar(da_x); 
- var da_max = _dar.max() - , da_min = _dar.min() - , da_ptp = da_max - da_min; - - for (var si = 0; si < da_x.length; si++) { - da_x[si] = da_x[si] / da_ptp; - } - - // center and scale the std line - da_xs.min = tv.ndar.from(da_xs).min(); - for (var jj = 0; jj < da_xs.length; jj++) { - da_xs[jj] -= da_xs.min; - da_xs[jj] /= ys_std; - // multiply by -1 because y axis points down - da_xs[jj] *= -1; - } - - // center and scale to std each signal - for (var jjj = 0; jjj < n_chan; jjj++) { - da_y[jjj] = []; - // This computes a slice at the beginning of the signal to be displayed on the y axis - // The signal might be shorter than the width hence the min - for (var ii = 0; ii < Math.min(f.sz_ctx_y.x, ys.shape[0]); ii++) { - da_y[jjj][ii] = (ys.data[ii * n_chan + jjj] - ys_mean) / ys_std; - // multiply by -1 because y axis points down - da_y[jjj][ii] *= -1; - } - } - - f.da_lines = da_lines; - f.da_x_dt = f.dt() * f.current_slice()[0].di; - f.da_x = da_x; - f.da_xs = [0, da_xs[da_xs.length - 1]].concat(da_xs, [0]); // filled area needs start == end - f.da_y = da_y; - }; - - f.render_focus = function () { - - var ts = f.ts() - , g = f.gp_lines.selectAll("g").data(f.da_lines, function (d) { - return d.id; - }); - - - if (!f.we_are_setup) { - - - f.line_paths = g.enter() - .append("g") - .attr("transform", function (d, i) { - return "translate(0, " + f.sc_fcs_y(i) + ")"; - }) - .append("path") - .attr("vector-effect", "non-scaling-stroke"); - } - - - f.line_paths.attr("d", function (d) { - return d3.line() - .x(function (d, i) { - return f.sc_ctx_x(ts.data[i]); - }) - .y(function (d) { - return d; - }) - (d.sig); - }); - - - }; - - f.render_contexts = function () { - - // originally used to draw context lines and average - - - }; - - f.scale_focus_stroke = function () { - var total = f.sz_fcs - , xdom = f.sc_fcs_x.domain() - , ydom = f.sc_fcs_y.domain() - , dx = xdom[1] - xdom[0] - , dy = ydom[1] - ydom[0] - , area = dx * dy - , area2 = total.x * total.y; - - 
//console.log(area / area2); - if (window.navigator.userAgent.indexOf("Edge") > -1) { - f.gp_lines.selectAll("g").selectAll("path").attr("stroke-width", "0.3px");//4*Math.sqrt(Math.abs(area / area2))) - } else { - f.gp_lines.selectAll("g").selectAll("path").attr("stroke-width", "1px");//4*Math.sqrt(Math.abs(area / area2))) - } - }; - - - f.add_brushes = function () { - - // horizontal context brush - var br_ctx_x_fn = function () { - - var event_selection_x = []; - // Different extent when it is: - //1.from the brush of 2D Focus Brush - if (d3.event.selection != null && d3.event.selection[0][0] != null) { - event_selection_x[0] = d3.event.selection[0][0]; - event_selection_x[1] = d3.event.selection[1][0]; - } - //2.from the end of focus brush - else if (d3.event.selection == null) { - event_selection_x = [f.sc_ctx_x.range()[0], f.sc_ctx_x.range()[1]]; - f.dom_x = [f.t0(), f.t0() + f.dt() * f.shape()[0]]; - } - //3.from itself - else { - event_selection_x = d3.event.selection; - } - - - var scale_brushed = d3.scaleLinear().domain(f.dom_x).range(f.sc_ctx_x.range()); - - - //selection is now in coordinates and we have to map it using scales - event_selection_x = event_selection_x.map(scale_brushed.invert, scale_brushed); - - - dom = f.br_ctx_x === null ? f.sc_ctx_x.domain() : event_selection_x; - - f.dom_x = dom; - - sc = f.sc_fcs_x; - x_scaling = scale_brushed.domain()[1] / (dom[1] - dom[0]); - sc.domain(dom); - f.sc_ctx_x.domain(dom); - f.gp_ax_fcs_x.call(f.ax_fcs_x); - f.gp_ax_ctx_x.call(f.ax_ctx_x); - - - // TODO: This seems to cause problems with negative values and commenting it out does not seem to - // cause any additional problems. This could do with some double checking. 
- f.gp_lines.attr("transform", "translate(" + sc(0) + ", 0) scale(" + x_scaling + ", 1)"); - } - - // vertical changes - , br_ctx_y_fn = function () { - - var event_selection_y = []; - - if (d3.event == null || d3.event.selection == null) { - event_selection_y = f.sc_ctx_y.range(); - f.dom_y = [-1, f.shape()[2]]; - } - else if (d3.event.selection != null && d3.event.selection[0][0] != null) { - event_selection_y[1] = d3.event.selection[0][1]; - event_selection_y[0] = d3.event.selection[1][1]; - } - else { - event_selection_y[0] = d3.event.selection[1]; - event_selection_y[1] = d3.event.selection[0]; - } - - var scale_brushed = d3.scaleLinear().domain(f.dom_y).range(f.sc_ctx_y.range()); - - - event_selection_y = event_selection_y.map(scale_brushed.invert, scale_brushed); - var dom = f.br_ctx_y === null ? f.sc_ctx_y.domain() : event_selection_y; - f.dom_y = dom; - var yscl = f.sz_fcs.y / (dom[1] - dom[0]) / 5; - f.sc_fcs_y.domain(dom).range([f.sz_ctx_y.y, 0]); - f.gp_ax_fcs_y.call(f.ax_fcs_y); - f.gp_lines.selectAll("g").attr("transform", function (d, i) { - return "translate(0, " + f.sc_fcs_y(i) + ")" + "scale (1, " + yscl + ")" - }).selectAll("path").attr("stroke-width", "" + (3 / yscl)); - - }; - - f.br_ctx_y_fn = br_ctx_y_fn; - - br_ctx_end = function () { - - //get the selected time range - var event_selection_x = []; - if (d3.event.selection != null) { - event_selection_x[0] = d3.event.selection[0]; - event_selection_x[1] = d3.event.selection[1]; - selection_x = event_selection_x; - } - event_selection_x = event_selection_x.map(f.sc_ctx_x.invert); - timeselection = event_selection_x; - - // remove the last time's selection - f.gp_ctx_x.selectAll(".selected-time").remove(); - - //change the actual time point in the slider - if (d3.event.selection != null) { - f.timeselection_update_fn(triggered_by_timeselection) - } - - - }; - - // on end of focus brush - // this is on f so that f can call it when everything else is done.. 
- f.br_fcs_endfn = function (no_render) { - if (!d3.event || !d3.event.sourceEvent) { - br_ctx_y_fn(); - f.scale_focus_stroke(); - return; - } - br_ctx_x_fn(); - br_ctx_y_fn(); - f.gp_br_fcs.node().__brush.selection = null; - f.gp_br_fcs.call(f.br_fcs); - f.scale_focus_stroke(); - - - }; - - - f.br_fcs_startfn = function () { - // we will use the left upper of the brush to do a tooltip - - //select a channel - var event_selection_y = []; - event_selection_y[1] = d3.event.selection[0][1]; - event_selection_y = event_selection_y.map(f.sc_ctx_y.invert); - - //choose the time point - var event_selection_x = []; - event_selection_x[1] = d3.event.selection[0][0]; - event_selection_x = event_selection_x.map(f.sc_ctx_x.invert); - if (event_selection_x[1] < 0) { - event_selection_x[1] = 0 + f.da_x; - } - - - timerange = f.sc_fcs_x.domain()[1]; - channelID = parseInt(event_selection_y[1]); - timepoint_length = f.da_lines[channelID].sig.length; - - timepoint = event_selection_x[1] / f.sc_fcs_x.domain()[1]; - timepoint = timepoint * timepoint_length; - timepoint = parseInt(timepoint); - - valuearray = f.ys().data; - channel_number = f.channels().length; - channel_index = f.channels().indexOf(channelID); - - //print out the channel name(label) and value - $("#info-channel").html(' ' + f.labels()[parseInt(event_selection_y[1])]); - $("#info-time").html(" " + timepoint); - $("#info-value").html(" " + valuearray[channel_number * timepoint + channel_index]); - - } - - - // create brushes - f.br_ctx_x = d3.brushX().extent([[f.sc_ctx_x.range()[0], 0], [f.sc_ctx_x.range()[1], f.sz_ctx_x.y]]).on("end", br_ctx_end); - f.br_fcs = d3.brush().extent([[f.sc_fcs_x.range()[0], 0], [f.sc_fcs_x.range()[1], f.sz_fcs.y]]) - .on("end", f.br_fcs_endfn).on("start", f.br_fcs_startfn) - .on("brush", f.br_fcs_brush); - - // add time selection brush group - f.gp_br_ctx_x = f.gp_ctx_x.append("g"); - //add title for the time selection area - f.timeselection_title = f.gp_br_ctx_x.append("text").text("Time 
Selection").attr("y", -10); - f.gp_br_ctx_x.classed("brush", true).attr("class", "time-selection-brush").call(f.br_ctx_x).selectAll("rect").attr("height", f.sz_ctx_x.y); - - - //add main focus brush group - f.gp_br_fcs = f.gp_fcs.append("g").classed("brush", true).call(f.br_fcs); - - - }; - - - //functions for the time selection window - f.timeselection_update_fn = function (triggered) { - - //display the selected time range - f.text_timeselection_range = f.gp_ctx_x.append("text").attr("class", "selected-time").attr("id", "time-selection") - .text("Selected Time Range: " + timeselection[0].toFixed(2) + "ms" + " to " + timeselection[1].toFixed(2) + "ms"); - f.text_interval = f.gp_ctx_x.append("text").attr("class", "selected-time").attr("id", "time-selection-interval") - .text(" Interval:" + (timeselection[1] - timeselection[0]).toFixed(2) + " ms").attr("x", 100).attr("y", -10); - - if (triggered) { - timeselection_interval = timeselection[1] - timeselection[0]; - timeselection_interval_length = parseInt(timeselection_interval / f.dt()) - 1; - //call the energy computation method - tv.util.get_time_selection_energy(f.baseURL(), f.current_slice(), f.energy_callback, f.channels(), f.mode(), f.state_var(), timeselection_interval_length); - //update the time in the input tag - d3.select("#TimeNow").property('value', timeselection[0].toFixed(2)); - //update the time in the 3d viewer's time - var time_index=parseInt((timeselection[0]-f.t0())/f.dt()); - $('#slider').slider('value', time_index); - loadFromTimeStep(parseInt(timeselection[0])); - } - }; - - //move the time selection window with the slider - f.timeselection_move_fn = function () { - redrawSelection() - }; - - - //TODO need to fix one additional step brought by any change - function redrawSelection() { - if (parseInt(timeselection[1]) == parseInt(f.sc_ctx_x.domain()[1])) { - f.jump_to_next_time_range() - } - if (timeStepsPerTick > 1) { - d3.select(f.gp_br_ctx_x.node()).call(f.br_ctx_x.move, [timeselection[0] + 
f.dt() * timeStepsPerTick, timeselection[1] + f.dt() * timeStepsPerTick].map(f.sc_ctx_x)); - } - else if (timeStepsPerTick < 1) { - d3.select(f.gp_br_ctx_x.node()).call(f.br_ctx_x.move, [timeselection[0] + f.dt() * 1 / (1 / timeStepsPerTick + 1), timeselection[1] + f.dt() * 1 / (1 / timeStepsPerTick + 1)].map(f.sc_ctx_x)); - } - else if (timeStepsPerTick === 1) { - d3.select(f.gp_br_ctx_x.node()).call(f.br_ctx_x.move, [timeselection[0] + f.dt(), timeselection[1] + f.dt()].map(f.sc_ctx_x)); - } - } - - f.jump_to_next_time_range = function(){ - var time_data_length=f.shape()[0]; - var current_slice_length=f.current_slice()[0].hi-f.current_slice()[0].lo; - if(f.current_slice()[0].hi+current_slice_length Date: Thu, 26 Jul 2018 17:51:27 +1000 Subject: [PATCH 24/53] TVB-2378 Fix behaviours when reaching the end jump to the next page or jump to the beginning when reaches the end --- tvb/interfaces/web/static/js/tvbviz.js | 43 +++++++++++++++----------- 1 file changed, 25 insertions(+), 18 deletions(-) diff --git a/tvb/interfaces/web/static/js/tvbviz.js b/tvb/interfaces/web/static/js/tvbviz.js index fadcf3707..0f7492f60 100644 --- a/tvb/interfaces/web/static/js/tvbviz.js +++ b/tvb/interfaces/web/static/js/tvbviz.js @@ -551,7 +551,7 @@ tv.plot = { return f; }, - //time sereis uses d3v5 + //time sereis uses d3v5 time_series: function () { var f = function (root) { @@ -1135,12 +1135,10 @@ tv.plot = { //functions for the time selection window f.timeselection_update_fn = function (triggered) { - //display the selected time range - f.text_timeselection_range = f.gp_ctx_x.append("text").attr("class", "selected-time").attr("id", "time-selection") - .text("Selected Time Range: " + timeselection[0].toFixed(2) + "ms" + " to " + timeselection[1].toFixed(2) + "ms"); - f.text_interval = f.gp_ctx_x.append("text").attr("class", "selected-time").attr("id", "time-selection-interval") - .text(" Interval:" + (timeselection[1] - timeselection[0]).toFixed(2) + " ms").attr("x", 100).attr("y", 
-10); + d3.select("#SetIntervalStart").property('value', timeselection[0].toFixed(2)); + d3.select("#SetIntervalEnd").property('value', timeselection[1].toFixed(2)); + $("#info-interval").html((timeselection[1] - timeselection[0]).toFixed(2) + "ms"); if (triggered) { timeselection_interval = timeselection[1] - timeselection[0]; @@ -1150,7 +1148,7 @@ tv.plot = { //update the time in the input tag d3.select("#TimeNow").property('value', timeselection[0].toFixed(2)); //update the time in the 3d viewer's time - var time_index=parseInt((timeselection[0]-f.t0())/f.dt()); + var time_index = parseInt((timeselection[0] - f.t0()) / f.dt()); $('#slider').slider('value', time_index); loadFromTimeStep(parseInt(timeselection[0])); } @@ -1167,6 +1165,12 @@ tv.plot = { if (parseInt(timeselection[1]) == parseInt(f.sc_ctx_x.domain()[1])) { f.jump_to_next_time_range() } + else if (timeselection[0] >= f.sc_ctx_x.domain()[1] - f.dt()) { + dom = [0, f.t0() + f.dt() * f.shape()[0]]; + f.sc_ctx_x.domain(dom); + f.gp_ax_ctx_x.call(f.ax_ctx_x); + d3.select(f.gp_br_ctx_x.node()).call(f.br_ctx_x.move, [0, timeselection[1] - timeselection[0]].map(f.sc_ctx_x)); + } if (timeStepsPerTick > 1) { d3.select(f.gp_br_ctx_x.node()).call(f.br_ctx_x.move, [timeselection[0] + f.dt() * timeStepsPerTick, timeselection[1] + f.dt() * timeStepsPerTick].map(f.sc_ctx_x)); } @@ -1178,19 +1182,22 @@ tv.plot = { } } - f.jump_to_next_time_range = function(){ - var time_data_length=f.shape()[0]; - var current_slice_length=f.current_slice()[0].hi-f.current_slice()[0].lo; - if(f.current_slice()[0].hi+current_slice_length Date: Thu, 26 Jul 2018 17:52:21 +1000 Subject: [PATCH 25/53] TVB-2379 Merge the gl header template and 3D script --- .../commons/scripts/virtualBrain.js | 124 ++++++++++++++++-- .../new_dual_brain/dual_brain_2d_view.html | 7 +- .../new_dual_brain/dual_brain_3d_view.html | 2 +- .../new_dual_brain/gl_dual_view_header.html | 23 ---- .../scripts/timeseries3DScript.js | 96 ++++++-------- 
.../genshi/visualizers/time_series/view.html | 2 +- 6 files changed, 161 insertions(+), 93 deletions(-) delete mode 100644 tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/gl_dual_view_header.html diff --git a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js index ae4ae99ea..8fc142302 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js @@ -127,6 +127,8 @@ var activityMin = 0, activityMax = 0; var isOneToOneMapping = false; var isDoubleView = false; var isEEGView = false; +//apply transparency on the shell surface +var withTransparency = false; var drawingMode; var VS_showLegend = true; var isInternalSensorView = false; @@ -157,9 +159,31 @@ var near = 0.1; // index of the currently selected node. This is equivalent to CONN_pickedIndex var VS_pickedIndex = -1; - +//selected channels used to color the energy spheres +var VS_selectedchannels=[]; var VB_BrainNavigator; +//default time selection time +var timeselection_interval=0; +//indicating we are drawing the energy spheres and applying material colors +var isDrawingSpheres = false; +/** + * Change transparency of cortical surface from user-input. 
+ * + * @param inputField user given input value for transparency of cortical-surface + */ +var _alphaValue = 1; +function changeSurfaceTransparency(inputField) { + var newValue = inputField.value; + + if (!isNaN(parseFloat(newValue)) && isFinite(newValue) && parseFloat(newValue) >= 0 && parseFloat(newValue) <= 1) { + _alphaValue = parseFloat(newValue); + } else { + inputField.value = _alphaValue; + displayMessage("Transparency value should be a number between 0 and 1.", "warningMessage"); + } +} + function VS_init_hemisphere_mask(hemisphere_chunk_mask) { VS_hemisphere_chunk_mask = hemisphere_chunk_mask; @@ -351,6 +375,17 @@ function _VS_init_cubicalMeasurePoints() { } } +function _VS_init_sphereMeasurePoints() { + for (let i = 0; i < NO_OF_MEASURE_POINTS; i++) { + const result = HLPR_sphereBufferAtPoint(gl, measurePoints[i], 1);//3 for the default radius value now, we will modify it later + const bufferVertices = result[0]; + const bufferNormals = result[1]; + const bufferTriangles = result[2]; + const bufferColor = createColorBufferForCube(false); + measurePointsBuffers[i] = [bufferVertices, bufferNormals, bufferTriangles, bufferColor]; + } +} + function VS_StartSurfaceViewer(urlVerticesList, urlLinesList, urlTrianglesList, urlNormalsList, urlMeasurePoints, noOfMeasurePoints, urlRegionMapList, urlMeasurePointsLabels, boundaryURL, shelveObject, minMeasure, maxMeasure, urlMeasure, hemisphereChunkMask) { @@ -375,19 +410,24 @@ function VS_StartBrainActivityViewer(baseDatatypeURL, onePageSize, urlTimeList, urlTrianglesList, urlNormalsList, urlMeasurePoints, noOfMeasurePoints, urlRegionMapList, minActivity, maxActivity, oneToOneMapping, doubleView, shelfObject, hemisphereChunkMask, - urlMeasurePointsLabels, boundaryURL, measurePointsSelectionGID) { + urlMeasurePointsLabels, boundaryURL, measurePointsSelectionGID, transparencyStatus) { _VS_movie_entrypoint(baseDatatypeURL, onePageSize, urlTimeList, urlVerticesList, urlLinesList, urlTrianglesList, urlNormalsList, 
urlMeasurePoints, noOfMeasurePoints, urlRegionMapList, minActivity, maxActivity, oneToOneMapping, doubleView, shelfObject, hemisphereChunkMask, urlMeasurePointsLabels, boundaryURL); - _VS_init_cubicalMeasurePoints(); + _VS_init_sphereMeasurePoints(); if (!isDoubleView) { // If this is a brain activity viewer then we have to initialize the selection component _initChannelSelection(measurePointsSelectionGID); // For the double view the selection is the responsibility of the extended view functions } + withTransparency = transparencyStatus; + //pause by default + AG_isStopped = true; + _alphaValue=0.1; + displayMeasureNodes=true; } function _isValidActivityData() { @@ -543,6 +583,11 @@ function _initSliders() { currentTimeValue = target.value; $('#TimeNow').val(currentTimeValue); }, + change: function (event, ui) { + triggered_by_timeselection = false; + tsView.timeselection_move_fn(); + triggered_by_timeselection = true; + }, stop: function (event, target) { sliderSel = false; loadFromTimeStep(target.value); @@ -913,6 +958,8 @@ function drawBuffer(drawMode, buffers) { setMatrixUniforms(); if (isOneToOneMapping) { SHADING_Context.one_to_one_program_draw(GL_shaderProgram, buffers[0], buffers[1], buffers[3], buffers[2], drawMode); + } else if (isDrawingSpheres) { + SHADING_Context.region_program_draw(GL_shaderProgram, buffers[0], buffers[1], buffers[0], buffers[2], drawMode); } else { SHADING_Context.region_program_draw(GL_shaderProgram, buffers[0], buffers[1], buffers[3], buffers[2], drawMode); } @@ -937,6 +984,12 @@ function drawBuffers(drawMode, buffersSets, bufferSetsMask, useBlending, cullFac // Blending function for alpha: transparent pix blended over opaque -> opaque pix if (cullFace) { gl.enable(gl.CULL_FACE); + if (withTransparency) { + gl.uniform1f(GL_shaderProgram.alphaUniform, _alphaValue); + } + else { + gl.uniform1f(GL_shaderProgram.alphaUniform, 1); + } gl.cullFace(cullFace); } } @@ -945,11 +998,34 @@ function drawBuffers(drawMode, buffersSets, 
bufferSetsMask, useBlending, cullFac if (bufferSetsMask !== null && bufferSetsMask !== undefined && !bufferSetsMask[i]) { continue; } - drawBuffer(drawMode, buffersSets[i]); + + //display spheres with full alpha value + if (isDrawingSpheres) { + gl.uniform1i(GL_shaderProgram.useVertexColors, false); + gl.uniform1f(GL_shaderProgram.alphaUniform, 1); + + // set sphere color green for the selected channels ones and yellow for the others + if (VS_selectedchannels.includes(i)) { + gl.uniform4f(GL_shaderProgram.materialColor, 0.99, 0.99, 0.0, 1.0); + drawBuffer(drawMode, buffersSets[i]); + } + else { + gl.uniform4f(GL_shaderProgram.materialColor, 0.34, 0.95, 0.37, 1.0); + drawBuffer(drawMode, buffersSets[i]); + } + gl.uniform1i(GL_shaderProgram.useVertexColors, true); + + if (withTransparency) { + gl.uniform1f(GL_shaderProgram.alphaUniform, _alphaValue); + } + } + else { + drawBuffer(drawMode, buffersSets[i]); + } + } if (useBlending) { - gl.disable(gl.BLEND); gl.disable(gl.CULL_FACE); setLighting(lightSettings); // Draw the same transparent object the second time @@ -1034,6 +1110,11 @@ function tick() { const currentTimeInFrame = Math.floor((currentTimeValue - totalPassedActivitiesData) / TIME_STEP); updateColors(currentTimeInFrame); + //update energy + if(timeselection_interval!=0 && !AG_isStopped){ + + changeCubicalMeasurePoints_energy(); + } drawScene(); /// Update FPS and Movie timeline @@ -1097,9 +1178,15 @@ function drawScene() { } if (isInternalSensorView) { - // for internal sensors we render only the sensors + gl.uniform1f(GL_shaderProgram.alphaUniform, 1); drawBuffers(gl.TRIANGLES, measurePointsBuffers); } else { + //draw the nodes first to make it appear + if (displayMeasureNodes) { + isDrawingSpheres = true; + drawBuffers(gl.TRIANGLES, measurePointsBuffers); + isDrawingSpheres = false; + } // draw surface drawBuffers(drawingMode, brainBuffers, bufferSetsMask); @@ -1108,9 +1195,6 @@ function drawScene() { if (drawTriangleLines) { 
drawBrainLines(brainLinesBuffers, brainBuffers, bufferSetsMask); } - if (displayMeasureNodes) { - drawBuffers(gl.TRIANGLES, measurePointsBuffers); - } } if (isFaceToDisplay) { @@ -1127,7 +1211,6 @@ function drawScene() { } else { gl.bindFramebuffer(gl.FRAMEBUFFER, GL_colorPickerBuffer); - gl.disable(gl.BLEND); gl.disable(gl.DITHER); gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT); setLighting(pickingLightSettings); @@ -1136,12 +1219,18 @@ function drawScene() { GL_initColorPickingData(NO_OF_MEASURE_POINTS); } + isDrawingSpheres = true; for (let i = 0; i < NO_OF_MEASURE_POINTS; i++) { const mpColor = GL_colorPickerInitColors[i]; gl.uniform4fv(GL_shaderProgram.materialColor, mpColor); drawBuffer(gl.TRIANGLES, measurePointsBuffers[i]); } + isDrawingSpheres = false; VS_pickedIndex = GL_getPickedIndex(); + //display the channel name + if (VS_pickedIndex != -1) { + displayMessage("The highlighted node is " + measurePointsLabels[VS_pickedIndex], "infoMessage") + } doPick = false; gl.bindFramebuffer(gl.FRAMEBUFFER, null); } @@ -1311,3 +1400,18 @@ function readFileData(fileUrl, async, callIdentifier) { /////////////////////////////////////// ~~~~~~~~~~ END DATA RELATED METHOD ~~~~~~~~~~~~~ ////////////////////////////////// +/////////////////////////////////////// ~~~~~~~~~~ START ENERGY RELATED METHOD ~~~~~~~~~~~~~ ////////////////////////////////// +//init spheres with energy controlling the radius +function changeCubicalMeasurePoints_energy() { + selectedchannels=tsView.channels(); + for (let i = 0; i < selectedchannels.length; i++) { + // generate spheres + const result = HLPR_sphereBufferAtPoint(gl, measurePoints[selectedchannels[i]],timeselection_energy[i][currentTimeValue]); + const bufferVertices = result[0]; + const bufferNormals = result[1]; + const bufferTriangles = result[2]; + const bufferColor = createColorBufferForCube(false); + measurePointsBuffers[i] = [bufferVertices, bufferNormals, bufferTriangles, bufferColor]; + } +} 
+/////////////////////////////////////// ~~~~~~~~~~ END ENERGY RELATED METHOD ~~~~~~~~~~~~~ ////////////////////////////////// \ No newline at end of file diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html index be39f12ea..2113f1387 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_2d_view.html @@ -58,11 +58,16 @@
    - ms + Time Selection + to ms
    Submit
    +
  • +
  • + Interval: + 0
    diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_view.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_view.html index 395b2ee4f..75d06e921 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_view.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_view.html @@ -1,7 +1,7 @@
    - + diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/gl_dual_view_header.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/gl_dual_view_header.html deleted file mode 100644 index b007a047b..000000000 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/gl_dual_view_header.html +++ /dev/null @@ -1,23 +0,0 @@ - -
    - - - - - - - - - - - - - - - - - - - -
    \ No newline at end of file diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseries3DScript.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseries3DScript.js index 042d6c7aa..8fc142302 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseries3DScript.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseries3DScript.js @@ -4,7 +4,7 @@ * TheVirtualBrain-Scientific Package (for simulators). See content of the * documentation-folder for more details. See also http://www.thevirtualbrain.org * - * (c) 2012-2017, Baycrest Centre for Geriatric Care ("Baycrest") and others and others + * (c) 2012-2017, Baycrest Centre for Geriatric Care ("Baycrest") and others * * This program is free software: you can redistribute it and/or modify it under the * terms of the GNU General Public License as published by the Free Software Foundation, @@ -17,33 +17,6 @@ * **/ -/* globals gl, GL_shaderProgram, SHADING_Context tsView */ - -/** - * WebGL methods "inheriting" from webGL_xx.js in static/js. - */ - - -var _alphaValue = 0.1; - -/** - * Change transparency of cortical surface from user-input. - * - * @param inputField user given input value for transparency of cortical-surface - */ -function changeSurfaceTransparency(inputField) { - var newValue = inputField.value; - - if (!isNaN(parseFloat(newValue)) && isFinite(newValue) && parseFloat(newValue) >= 0 && parseFloat(newValue) <= 1) { - _alphaValue = parseFloat(newValue); - } else { - inputField.value = _alphaValue; - displayMessage("Transparency value should be a number between 0 and 1.", "warningMessage"); - } -} - -// below is the modified code from virtualBrain.js - /* The comment below lists the global functions used in this file. * It is here to make jshint happy and to document these implicit global dependencies. * In the future we might group these into namespace objects. 
@@ -159,9 +132,7 @@ var withTransparency = false; var drawingMode; var VS_showLegend = true; var isInternalSensorView = false; - -//display spheres by default -var displayMeasureNodes = true; +var displayMeasureNodes = false; var isFaceToDisplay = false; var drawNavigator = false; @@ -190,11 +161,28 @@ var near = 0.1; var VS_pickedIndex = -1; //selected channels used to color the energy spheres var VS_selectedchannels=[]; - var VB_BrainNavigator; -//indicating we are drawing the spheres and applying material colors +//default time selection time +var timeselection_interval=0; +//indicating we are drawing the energy spheres and applying material colors var isDrawingSpheres = false; +/** + * Change transparency of cortical surface from user-input. + * + * @param inputField user given input value for transparency of cortical-surface + */ +var _alphaValue = 1; +function changeSurfaceTransparency(inputField) { + var newValue = inputField.value; + + if (!isNaN(parseFloat(newValue)) && isFinite(newValue) && parseFloat(newValue) >= 0 && parseFloat(newValue) <= 1) { + _alphaValue = parseFloat(newValue); + } else { + inputField.value = _alphaValue; + displayMessage("Transparency value should be a number between 0 and 1.", "warningMessage"); + } +} function VS_init_hemisphere_mask(hemisphere_chunk_mask) { @@ -376,10 +364,9 @@ function _VS_movie_entrypoint(baseDatatypeURL, onePageSize, urlTimeList, urlVert } } - function _VS_init_cubicalMeasurePoints() { for (let i = 0; i < NO_OF_MEASURE_POINTS; i++) { - const result = HLPR_sphereBufferAtPoint(gl, measurePoints[i], 1);//3 for the default radius value now, we will modify it later + const result = HLPR_bufferAtPoint(gl, measurePoints[i]); const bufferVertices = result[0]; const bufferNormals = result[1]; const bufferTriangles = result[2]; @@ -388,6 +375,16 @@ function _VS_init_cubicalMeasurePoints() { } } +function _VS_init_sphereMeasurePoints() { + for (let i = 0; i < NO_OF_MEASURE_POINTS; i++) { + const result = 
HLPR_sphereBufferAtPoint(gl, measurePoints[i], 1);//3 for the default radius value now, we will modify it later + const bufferVertices = result[0]; + const bufferNormals = result[1]; + const bufferTriangles = result[2]; + const bufferColor = createColorBufferForCube(false); + measurePointsBuffers[i] = [bufferVertices, bufferNormals, bufferTriangles, bufferColor]; + } +} function VS_StartSurfaceViewer(urlVerticesList, urlLinesList, urlTrianglesList, urlNormalsList, urlMeasurePoints, noOfMeasurePoints, urlRegionMapList, urlMeasurePointsLabels, @@ -419,7 +416,7 @@ function VS_StartBrainActivityViewer(baseDatatypeURL, onePageSize, urlTimeList, urlRegionMapList, minActivity, maxActivity, oneToOneMapping, doubleView, shelfObject, hemisphereChunkMask, urlMeasurePointsLabels, boundaryURL); - _VS_init_cubicalMeasurePoints(); + _VS_init_sphereMeasurePoints(); if (!isDoubleView) { // If this is a brain activity viewer then we have to initialize the selection component @@ -429,7 +426,8 @@ function VS_StartBrainActivityViewer(baseDatatypeURL, onePageSize, urlTimeList, withTransparency = transparencyStatus; //pause by default AG_isStopped = true; - + _alphaValue=0.1; + displayMeasureNodes=true; } function _isValidActivityData() { @@ -564,7 +562,7 @@ function _initSliders() { if (timeData.length > 0) { $("#sliderStep").slider({ - min: 0.49, max: maxSpeedSlider, step: 1, value: 5, + min: 0, max: maxSpeedSlider, step: 1, value: 5, stop: function () { refreshCurrentDataSlice(); sliderSel = false; @@ -725,7 +723,6 @@ function customMouseDown(event) { if (displayMeasureNodes) { doPick = true; } - } function customMouseUp(event) { @@ -966,8 +963,6 @@ function drawBuffer(drawMode, buffers) { } else { SHADING_Context.region_program_draw(GL_shaderProgram, buffers[0], buffers[1], buffers[3], buffers[2], drawMode); } - - } /** @@ -1011,11 +1006,11 @@ function drawBuffers(drawMode, buffersSets, bufferSetsMask, useBlending, cullFac // set sphere color green for the selected channels ones and 
yellow for the others if (VS_selectedchannels.includes(i)) { - gl.uniform4f(GL_shaderProgram.materialColor, 0.34, 0.95, 0.37, 1.0); + gl.uniform4f(GL_shaderProgram.materialColor, 0.99, 0.99, 0.0, 1.0); drawBuffer(drawMode, buffersSets[i]); } else { - gl.uniform4f(GL_shaderProgram.materialColor, 0.99, 0.99, 0.0, 1.0); + gl.uniform4f(GL_shaderProgram.materialColor, 0.34, 0.95, 0.37, 1.0); drawBuffer(drawMode, buffersSets[i]); } gl.uniform1i(GL_shaderProgram.useVertexColors, true); @@ -1026,7 +1021,6 @@ function drawBuffers(drawMode, buffersSets, bufferSetsMask, useBlending, cullFac } else { drawBuffer(drawMode, buffersSets[i]); - } } @@ -1034,12 +1028,10 @@ function drawBuffers(drawMode, buffersSets, bufferSetsMask, useBlending, cullFac if (useBlending) { gl.disable(gl.CULL_FACE); setLighting(lightSettings); - // Draw the same transparent object the second time if (cullFace === gl.FRONT) { drawBuffers(drawMode, buffersSets, bufferSetsMask, useBlending, gl.BACK); } - } } @@ -1137,8 +1129,7 @@ function tick() { lastTime = timeNow; if (timeData.length > 0 && !AG_isStopped) { - //TODO workaround for incorrect time values - document.getElementById("TimeNow").value = toSignificantDigits(timeData[currentTimeValue]+0.49, 2); + document.getElementById("TimeNow").value = toSignificantDigits(timeData[currentTimeValue], 2); } let meanFrameTime = 0; for (let i = 0; i < framestime.length; i++) { @@ -1193,13 +1184,9 @@ function drawScene() { //draw the nodes first to make it appear if (displayMeasureNodes) { isDrawingSpheres = true; - - drawBuffers(gl.TRIANGLES, measurePointsBuffers); isDrawingSpheres = false; - } - // draw surface drawBuffers(drawingMode, brainBuffers, bufferSetsMask); @@ -1208,7 +1195,6 @@ function drawScene() { if (drawTriangleLines) { drawBrainLines(brainLinesBuffers, brainBuffers, bufferSetsMask); } - } if (isFaceToDisplay) { @@ -1223,7 +1209,6 @@ function drawScene() { VB_BrainNavigator.drawNavigator(); } - } else { gl.bindFramebuffer(gl.FRAMEBUFFER, 
GL_colorPickerBuffer); gl.disable(gl.DITHER); @@ -1234,7 +1219,6 @@ function drawScene() { GL_initColorPickingData(NO_OF_MEASURE_POINTS); } - isDrawingSpheres = true; for (let i = 0; i < NO_OF_MEASURE_POINTS; i++) { const mpColor = GL_colorPickerInitColors[i]; @@ -1242,7 +1226,6 @@ function drawScene() { drawBuffer(gl.TRIANGLES, measurePointsBuffers[i]); } isDrawingSpheres = false; - VS_pickedIndex = GL_getPickedIndex(); //display the channel name if (VS_pickedIndex != -1) { @@ -1417,7 +1400,6 @@ function readFileData(fileUrl, async, callIdentifier) { /////////////////////////////////////// ~~~~~~~~~~ END DATA RELATED METHOD ~~~~~~~~~~~~~ ////////////////////////////////// - /////////////////////////////////////// ~~~~~~~~~~ START ENERGY RELATED METHOD ~~~~~~~~~~~~~ ////////////////////////////////// //init spheres with energy controlling the radius function changeCubicalMeasurePoints_energy() { diff --git a/tvb/interfaces/web/templates/genshi/visualizers/time_series/view.html b/tvb/interfaces/web/templates/genshi/visualizers/time_series/view.html index 26b87fae5..32c4db0cb 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/time_series/view.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/time_series/view.html @@ -1,5 +1,5 @@
    - + From be663fe31ea442dcd6c7fddeffddfcf9554da737 Mon Sep 17 00:00:00 2001 From: kimonoki Date: Mon, 30 Jul 2018 12:22:00 +1000 Subject: [PATCH 26/53] TVB-2378 Fix Energy for zoomed time slice --- tvb/interfaces/web/static/js/tvbviz.js | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/tvb/interfaces/web/static/js/tvbviz.js b/tvb/interfaces/web/static/js/tvbviz.js index 0f7492f60..a30cb9b6a 100644 --- a/tvb/interfaces/web/static/js/tvbviz.js +++ b/tvb/interfaces/web/static/js/tvbviz.js @@ -1143,8 +1143,13 @@ tv.plot = { if (triggered) { timeselection_interval = timeselection[1] - timeselection[0]; timeselection_interval_length = parseInt(timeselection_interval / f.dt()) - 1; + //retrieve energy for the whole timeline rather than a slice + var all_slice=f.current_slice(); + all_slice[0].di=f.shape()[1]; + all_slice[0].hi=f.shape()[0]; + all_slice[0].lo=0; //call the energy computation method - tv.util.get_time_selection_energy(f.baseURL(), f.current_slice(), f.energy_callback, f.channels(), f.mode(), f.state_var(), timeselection_interval_length); + tv.util.get_time_selection_energy(f.baseURL(), all_slice, f.energy_callback, f.channels(), f.mode(), f.state_var(), timeselection_interval_length); //update the time in the input tag d3.select("#TimeNow").property('value', timeselection[0].toFixed(2)); //update the time in the 3d viewer's time @@ -1183,7 +1188,6 @@ tv.plot = { } f.jump_to_next_time_range = function () { - var time_data_length = f.shape()[0]; var current_slice_length = f.current_slice()[0].hi - f.current_slice()[0].lo; if (f.current_slice()[0].hi + current_slice_length < time_data_length) { From b7afef498caaddfe73d1d6532feb8390cf49d9cb Mon Sep 17 00:00:00 2001 From: kimonoki Date: Mon, 30 Jul 2018 15:23:27 +1000 Subject: [PATCH 27/53] TVB-2379 Remove measurepoint control and unused files --- .../visualizers/new_dual_brain/controls.html | 3 +- .../dual_brain_toggle.controls.html | 59 - .../visualizers/new_dual_brain/preview.html | 
0 .../scripts/timeseries3DScript.js | 1417 ----------------- 4 files changed, 2 insertions(+), 1477 deletions(-) delete mode 100644 tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_toggle.controls.html delete mode 100644 tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/preview.html delete mode 100644 tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseries3DScript.js diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html index 214a77834..c06dc24d7 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html @@ -36,7 +36,8 @@ - + +
    diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_toggle.controls.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_toggle.controls.html deleted file mode 100644 index 504c0dece..000000000 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_toggle.controls.html +++ /dev/null @@ -1,59 +0,0 @@ - -
    - - -
    View hemispheres
    -
    - - -
    - - -
    - - -
    -
    - -
    Lighting
    -
    - -
    - -
    Show
    -
    - -
    - -
    -
    - -
    - - -
    -
    - -
    -
    - -
    Toggle
    -
    - -
    - -
    -
    - -
    -
    \ No newline at end of file diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/preview.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/preview.html deleted file mode 100644 index e69de29bb..000000000 diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseries3DScript.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseries3DScript.js deleted file mode 100644 index 8fc142302..000000000 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/timeseries3DScript.js +++ /dev/null @@ -1,1417 +0,0 @@ -/** - * TheVirtualBrain-Framework Package. This package holds all Data Management, and - * Web-UI helpful to run brain-simulations. To use it, you also need do download - * TheVirtualBrain-Scientific Package (for simulators). See content of the - * documentation-folder for more details. See also http://www.thevirtualbrain.org - * - * (c) 2012-2017, Baycrest Centre for Geriatric Care ("Baycrest") and others - * - * This program is free software: you can redistribute it and/or modify it under the - * terms of the GNU General Public License as published by the Free Software Foundation, - * either version 3 of the License, or (at your option) any later version. - * This program is distributed in the hope that it will be useful, but WITHOUT ANY - * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A - * PARTICULAR PURPOSE. See the GNU General Public License for more details. - * You should have received a copy of the GNU General Public License along with this - * program. If not, see . - * - **/ - -/* The comment below lists the global functions used in this file. - * It is here to make jshint happy and to document these implicit global dependencies. - * In the future we might group these into namespace objects. 
- * ( Global state is not in this list except gl and namespaces; let them be warnings ) - */ - -/* globals gl, SHADING_Context, GL_shaderProgram, displayMessage, HLPR_readJSONfromFile, readDataPageURL, - GL_handleKeyDown, GL_handleKeyUp, GL_handleMouseMove, GL_handleMouseWeel, - initGL, updateGLCanvasSize, LEG_updateLegendVerticesBuffers, - basicInitShaders, basicInitSurfaceLighting, GL_initColorPickFrameBuffer, - ColSchGetTheme, LEG_generateLegendBuffers, LEG_initMinMax - */ - -/** - * WebGL methods "inheriting" from webGL_xx.js in static/js. - */ -var BRAIN_CANVAS_ID = "GLcanvas"; -/** - * Variables for displaying Time and computing Frames/Sec - */ -var lastTime = 0; -var framestime = [50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, - 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50]; - -/** - * Time like entities: - * The movie time - * Measured in 'time steps' - * An index in the activitiesData array - * The display time - * Measured in 'ticks' - * Updated every TICK_STEP ms. - * We do not keep the value of this time. - * The displayed movie time - * The int value of it is in currentTimeValue. - * Measured in 'time steps'. - * Synchronizes the movie time to the display time. - */ - -/** - * Granularity of the display time in ms. - */ -var TICK_STEP = 33; // 30Hz -/** - * How many movie time steps for a display tick. - * If this is < 1 a movie frame will last 1/timeStepsPerTick ticks - */ -var timeStepsPerTick = 1; -/** - * The integer part of timeStepsPerTick - */ -var TIME_STEP = 1; -/** - * The current time in the activity movie. - * An index of the current movie frame. - * When timeStepsPerTick > it increments by TIME_STEP every tick. - * When timeStepsPerTick < 1 it increments by 1 every 1/timeStepsPerTick tick. - */ -var currentTimeValue = 0; -/** - * The maximum possible value of currentTimeValue - */ -var MAX_TIME = 0; -/** - * For how many display ticks have we drawn the same time step. 
- */ -var elapsedTicksPerTimeStep = 0; -/** - * At the maximum speed the time line finishes in 32 steps - * This is approximately 1s wall time (ignoring data fetches). - */ -var ACTIVITY_FRAMES_IN_TIME_LINE_AT_MAX_SPEED = 32; - -var sliderSel = false; - -var isPreview = false; -/** - * This buffer arrays will contain: - * arr[i][0] Vertices buffer - * arr[i][1] Normals buffer - * arr[i][2] Triangles indices buffer - * arr[i][3] Color buffer (same length as vertices /3 * 4) in case of one-to-one mapping - * arr[i][3] Region indexes, when not one-to-one mapping - */ -var brainBuffers = []; -var brainLinesBuffers = []; -var shelfBuffers = []; -var measurePointsBuffers = []; - -var regionBoundariesController = null; - -var activitiesData = [], timeData = [], measurePoints = [], measurePointsLabels = []; - -var pageSize = 0; -var urlBase = ''; -var selectedMode = 0; -var selectedStateVar = 0; -var currentActivitiesFileLength = 0; -var nextActivitiesFileData = []; -var totalPassedActivitiesData = 0; -var shouldIncrementTime = true; -var currentAsyncCall = null; - -var NO_OF_MEASURE_POINTS = 0; -var NEXT_PAGE_THREASHOLD = 100; - -var activityMin = 0, activityMax = 0; -var isOneToOneMapping = false; -var isDoubleView = false; -var isEEGView = false; -//apply transparency on the shell surface -var withTransparency = false; -var drawingMode; -var VS_showLegend = true; -var isInternalSensorView = false; -var displayMeasureNodes = false; -var isFaceToDisplay = false; - -var drawNavigator = false; -var drawTriangleLines = false; -var drawSpeculars = false; -/** - * Used to determine which buffer chunks belong to a hemisphere. - * The chunks are used to limit geometry size for a draw call. - */ -var VS_hemisphere_chunk_mask = null; -var bufferSetsMask = null; -var VS_hemisphereVisibility = null; -/** - * What regions are selected to be shown. - * Unselected regions are greyed out. - * This is used only by the brain activity movie for region level activity. 
- * For static viewers it is initialized to a full selection - */ -var VS_selectedRegions = []; -/** - * camera settings - */ -var near = 0.1; - -// index of the currently selected node. This is equivalent to CONN_pickedIndex -var VS_pickedIndex = -1; -//selected channels used to color the energy spheres -var VS_selectedchannels=[]; -var VB_BrainNavigator; - -//default time selection time -var timeselection_interval=0; -//indicating we are drawing the energy spheres and applying material colors -var isDrawingSpheres = false; -/** - * Change transparency of cortical surface from user-input. - * - * @param inputField user given input value for transparency of cortical-surface - */ -var _alphaValue = 1; -function changeSurfaceTransparency(inputField) { - var newValue = inputField.value; - - if (!isNaN(parseFloat(newValue)) && isFinite(newValue) && parseFloat(newValue) >= 0 && parseFloat(newValue) <= 1) { - _alphaValue = parseFloat(newValue); - } else { - inputField.value = _alphaValue; - displayMessage("Transparency value should be a number between 0 and 1.", "warningMessage"); - } -} - - -function VS_init_hemisphere_mask(hemisphere_chunk_mask) { - VS_hemisphere_chunk_mask = hemisphere_chunk_mask; - if (hemisphere_chunk_mask !== null && hemisphere_chunk_mask !== undefined) { - bufferSetsMask = []; - for (let i = 0; i < VS_hemisphere_chunk_mask.length; i++) { - bufferSetsMask[i] = 1; - } - } -} - -function VS_SetHemisphere(h) { - VS_hemisphereVisibility = h; - for (let i = 0; i < VS_hemisphere_chunk_mask.length; i++) { - if (h === null || h === undefined) { - bufferSetsMask[i] = 1; - } else if (h === 'l') { - bufferSetsMask[i] = 1 - VS_hemisphere_chunk_mask[i]; - } else if (h === 'r') { - bufferSetsMask[i] = VS_hemisphere_chunk_mask[i]; - } - } -} - -function VS_StartPortletPreview(baseDatatypeURL, urlVerticesList, urlTrianglesList, urlNormalsList, noOfMeasurePoints, - urlRegionMapList, boundaryURL, minActivity, maxActivity, oneToOneMapping) { - isPreview = true; - 
pageSize = 1; - urlBase = baseDatatypeURL; - activitiesData = HLPR_readJSONfromFile(readDataSplitPageURL(urlBase, 0, 1, selectedStateVar, selectedMode, TIME_STEP)); - if (oneToOneMapping === 'True') { - isOneToOneMapping = true; - } - activityMin = parseFloat(minActivity); - activityMax = parseFloat(maxActivity); - - NO_OF_MEASURE_POINTS = noOfMeasurePoints; - for (let i = 0; i < NO_OF_MEASURE_POINTS; i++) { - VS_selectedRegions.push(i); - } - - const canvas = document.getElementById(BRAIN_CANVAS_ID); - customInitGL(canvas); - initShaders(); - if (urlVerticesList) { - brainBuffers = initBuffers($.parseJSON(urlVerticesList), $.parseJSON(urlNormalsList), $.parseJSON(urlTrianglesList), - $.parseJSON(urlRegionMapList), false); - } - - ColSch_initColorSchemeComponent(activityMin, activityMax); - LEG_initMinMax(activityMin, activityMax); - LEG_generateLegendBuffers(); - - VB_BrainNavigator = new NAV_BrainNavigator(isOneToOneMapping, brainBuffers, measurePoints, measurePointsLabels); - regionBoundariesController = new RB_RegionBoundariesController(boundaryURL); - - // Enable keyboard and mouse interaction - canvas.onkeydown = GL_handleKeyDown; - canvas.onkeyup = GL_handleKeyUp; - canvas.onmousedown = customMouseDown; - canvas.oncontextmenu = function () { - return false; - }; - $(document).on('mousemove', GL_handleMouseMove); - $(document).on('mouseup', customMouseUp); - // We use drawScene instead of tick because tick's performance is worse. - // Portlet previews are static, not movies. Tick's movie update is not required. - // A call to updateColors has to be made to initialize the color buffer. 
- updateColors(0); - setInterval(drawScene, TICK_STEP); -} - -function _VS_static_entrypoint(urlVerticesList, urlLinesList, urlTrianglesList, urlNormalsList, urlMeasurePoints, - noOfMeasurePoints, urlRegionMapList, urlMeasurePointsLabels, boundaryURL, shelfObject, - hemisphereChunkMask, argDisplayMeasureNodes, argIsFaceToDisplay, - minMeasure, maxMeasure, urlMeasure) { - // initialize global configuration - isDoubleView = false; - isOneToOneMapping = false; - shouldIncrementTime = false; - AG_isStopped = true; - displayMeasureNodes = argDisplayMeasureNodes; - isFaceToDisplay = argIsFaceToDisplay; // this could be retrieved from the dom like drawNavigator - // make checkbox consistent with this flag - $("#displayFaceChkId").attr('checked', isFaceToDisplay); - drawNavigator = $("#showNavigator").prop('checked'); - - if (noOfMeasurePoints === 0) { - // we are viewing a surface with no region mapping - // we mock 1 measure point - measurePoints = [[0, 0, 0]]; - measurePointsLabels = ['']; - NO_OF_MEASURE_POINTS = 1; - // mock one activity frame - activityMin = 0; - activityMax = 1; - activitiesData = [[0]]; - } else { - _initMeasurePoints(noOfMeasurePoints, urlMeasurePoints, urlMeasurePointsLabels); - activityMin = parseFloat(minMeasure); - activityMax = parseFloat(maxMeasure); - let measure; - if (urlMeasure === '') { - // Empty url => The static viewer has to show a region map. - // The measure will be a range(NO_OF_MEASURE_POINTS) - measure = []; - for (let i = 0; i < NO_OF_MEASURE_POINTS; i++) { - measure.push(i); - } - } else { - measure = HLPR_readJSONfromFile(urlMeasure); - } - // The activity data will contain just one frame containing the values of the connectivity measure. 
- activitiesData = [measure]; - } - - VS_showLegend = false; - if (parseFloat(minMeasure) < parseFloat(maxMeasure)) { - const brainLegendDiv = document.getElementById('brainLegendDiv'); - ColSch_updateLegendLabels(brainLegendDiv, minMeasure, maxMeasure, "100%"); - VS_showLegend = true; - } - - for (let i = 0; i < NO_OF_MEASURE_POINTS; i++) { - VS_selectedRegions.push(i); - } - - const canvas = document.getElementById(BRAIN_CANVAS_ID); - _initViewerGL(canvas, urlVerticesList, urlNormalsList, urlTrianglesList, - urlRegionMapList, urlLinesList, boundaryURL, shelfObject, hemisphereChunkMask); - - _bindEvents(canvas); - - //specify the re-draw function. - if (_isValidActivityData()) { - setInterval(tick, TICK_STEP); - } -} - -function _VS_movie_entrypoint(baseDatatypeURL, onePageSize, urlTimeList, urlVerticesList, urlLinesList, - urlTrianglesList, urlNormalsList, urlMeasurePoints, noOfMeasurePoints, - urlRegionMapList, minActivity, maxActivity, - oneToOneMapping, doubleView, shelfObject, hemisphereChunkMask, urlMeasurePointsLabels, boundaryURL) { - // initialize global configuration - isDoubleView = doubleView; - if (oneToOneMapping === 'True') { - isOneToOneMapping = true; - } - // these global flags could be structured better - isEEGView = isDoubleView && !isInternalSensorView; - activityMin = parseFloat(minActivity); - activityMax = parseFloat(maxActivity); - pageSize = onePageSize; - urlBase = baseDatatypeURL; - - // initialize global data - _initMeasurePoints(noOfMeasurePoints, urlMeasurePoints, urlMeasurePointsLabels); - _initTimeData(urlTimeList); - initActivityData(); - - if (isDoubleView) { - $("#displayFaceChkId").trigger('click'); - } - drawNavigator = $("#showNavigator").prop('checked'); - - const canvas = document.getElementById(BRAIN_CANVAS_ID); - - _initViewerGL(canvas, urlVerticesList, urlNormalsList, urlTrianglesList, - urlRegionMapList, urlLinesList, boundaryURL, shelfObject, hemisphereChunkMask); - - _bindEvents(canvas); - - _initSliders(); - - 
//specify the re-draw function. - if (_isValidActivityData()) { - setInterval(tick, TICK_STEP); - } -} - -function _VS_init_cubicalMeasurePoints() { - for (let i = 0; i < NO_OF_MEASURE_POINTS; i++) { - const result = HLPR_bufferAtPoint(gl, measurePoints[i]); - const bufferVertices = result[0]; - const bufferNormals = result[1]; - const bufferTriangles = result[2]; - const bufferColor = createColorBufferForCube(false); - measurePointsBuffers[i] = [bufferVertices, bufferNormals, bufferTriangles, bufferColor]; - } -} - -function _VS_init_sphereMeasurePoints() { - for (let i = 0; i < NO_OF_MEASURE_POINTS; i++) { - const result = HLPR_sphereBufferAtPoint(gl, measurePoints[i], 1);//3 for the default radius value now, we will modify it later - const bufferVertices = result[0]; - const bufferNormals = result[1]; - const bufferTriangles = result[2]; - const bufferColor = createColorBufferForCube(false); - measurePointsBuffers[i] = [bufferVertices, bufferNormals, bufferTriangles, bufferColor]; - } -} - -function VS_StartSurfaceViewer(urlVerticesList, urlLinesList, urlTrianglesList, urlNormalsList, urlMeasurePoints, - noOfMeasurePoints, urlRegionMapList, urlMeasurePointsLabels, - boundaryURL, shelveObject, minMeasure, maxMeasure, urlMeasure, hemisphereChunkMask) { - - _VS_static_entrypoint(urlVerticesList, urlLinesList, urlTrianglesList, urlNormalsList, urlMeasurePoints, - noOfMeasurePoints, urlRegionMapList, urlMeasurePointsLabels, boundaryURL, shelveObject, - hemisphereChunkMask, false, false, minMeasure, maxMeasure, urlMeasure); - _VS_init_cubicalMeasurePoints(); -} - -function VS_StartEEGSensorViewer(urlVerticesList, urlLinesList, urlTrianglesList, urlNormalsList, urlMeasurePoints, - noOfMeasurePoints, urlMeasurePointsLabels, - shelfObject, minMeasure, maxMeasure, urlMeasure) { - isEEGView = true; - _VS_static_entrypoint(urlVerticesList, urlLinesList, urlTrianglesList, urlNormalsList, urlMeasurePoints, - noOfMeasurePoints, '', urlMeasurePointsLabels, '', shelfObject, 
null, true, true, - minMeasure, maxMeasure, urlMeasure); - _VS_init_cubicalMeasurePoints(); -} - -function VS_StartBrainActivityViewer(baseDatatypeURL, onePageSize, urlTimeList, urlVerticesList, urlLinesList, - urlTrianglesList, urlNormalsList, urlMeasurePoints, noOfMeasurePoints, - urlRegionMapList, minActivity, maxActivity, - oneToOneMapping, doubleView, shelfObject, hemisphereChunkMask, - urlMeasurePointsLabels, boundaryURL, measurePointsSelectionGID, transparencyStatus) { - _VS_movie_entrypoint(baseDatatypeURL, onePageSize, urlTimeList, urlVerticesList, urlLinesList, - urlTrianglesList, urlNormalsList, urlMeasurePoints, noOfMeasurePoints, - urlRegionMapList, minActivity, maxActivity, - oneToOneMapping, doubleView, shelfObject, hemisphereChunkMask, - urlMeasurePointsLabels, boundaryURL); - _VS_init_sphereMeasurePoints(); - - if (!isDoubleView) { - // If this is a brain activity viewer then we have to initialize the selection component - _initChannelSelection(measurePointsSelectionGID); - // For the double view the selection is the responsibility of the extended view functions - } - withTransparency = transparencyStatus; - //pause by default - AG_isStopped = true; - _alphaValue=0.1; - displayMeasureNodes=true; -} - -function _isValidActivityData() { - if (isOneToOneMapping) { - if (activitiesData.length !== brainBuffers.length) { - displayMessage("The number of activity buffers should equal the number of split surface slices", "errorMessage"); - return false; - } - if (3 * activitiesData[0][0].length !== brainBuffers[0][0].numItems) { - displayMessage("The number of activity points should equal the number of surface vertices", "errorMessage"); - return false; - } - } else { - if (NO_OF_MEASURE_POINTS !== activitiesData[0].length) { - displayMessage("The number of activity points should equal the number of regions", "errorMessage"); - return false; - } - } - return true; -} - -/** - * Scene setup common to all webgl brain viewers - */ -function 
_initViewerGL(canvas, urlVerticesList, urlNormalsList, urlTrianglesList, - urlRegionMapList, urlLinesList, boundaryURL, shelfObject, hemisphere_chunk_mask) { - customInitGL(canvas); - GL_initColorPickFrameBuffer(); - initShaders(); - - if (VS_showLegend) { - LEG_initMinMax(activityMin, activityMax); - ColSch_initColorSchemeGUI(activityMin, activityMax, LEG_updateLegendColors); - LEG_generateLegendBuffers(); - } else { - ColSch_initColorSchemeGUI(activityMin, activityMax); - } - - if (urlVerticesList) { - let parsedIndices = []; - if (urlRegionMapList) { - parsedIndices = $.parseJSON(urlRegionMapList); - } - brainBuffers = initBuffers($.parseJSON(urlVerticesList), $.parseJSON(urlNormalsList), - $.parseJSON(urlTrianglesList), parsedIndices, isDoubleView); - } - - VS_init_hemisphere_mask(hemisphere_chunk_mask); - - brainLinesBuffers = HLPR_getDataBuffers(gl, $.parseJSON(urlLinesList), isDoubleView, true); - regionBoundariesController = new RB_RegionBoundariesController(boundaryURL); - - if (shelfObject) { - shelfObject = $.parseJSON(shelfObject); - shelfBuffers = initBuffers(shelfObject[0], shelfObject[1], shelfObject[2], false, true); - } - - VB_BrainNavigator = new NAV_BrainNavigator(isOneToOneMapping, brainBuffers, measurePoints, measurePointsLabels); -} - - -function _bindEvents(canvas) { - // Enable keyboard and mouse interaction - canvas.onkeydown = GL_handleKeyDown; - canvas.onkeyup = GL_handleKeyUp; - canvas.onmousedown = customMouseDown; - $(document).on('mouseup', customMouseUp); - $(canvas).on('contextmenu', _onContextMenu); - $(document).on('mousemove', GL_handleMouseMove); - - $(canvas).mousewheel(function (event, delta) { - GL_handleMouseWeel(delta); - return false; // prevent default - }); - - if (!isDoubleView) { - const canvasX = document.getElementById('brain-x'); - if (canvasX) { - canvasX.onmousedown = function (event) { - VB_BrainNavigator.moveInXSection(event) - }; - } - const canvasY = document.getElementById('brain-y'); - if (canvasY) { - 
canvasY.onmousedown = function (event) { - VB_BrainNavigator.moveInYSection(event) - }; - } - const canvasZ = document.getElementById('brain-z'); - if (canvasZ) { - canvasZ.onmousedown = function (event) { - VB_BrainNavigator.moveInZSection(event) - }; - } - } -} - -function _initMeasurePoints(noOfMeasurePoints, urlMeasurePoints, urlMeasurePointsLabels) { - if (noOfMeasurePoints > 0) { - measurePoints = HLPR_readJSONfromFile(urlMeasurePoints); - measurePointsLabels = HLPR_readJSONfromFile(urlMeasurePointsLabels); - NO_OF_MEASURE_POINTS = measurePoints.length; - } else { - NO_OF_MEASURE_POINTS = 0; - measurePoints = []; - measurePointsLabels = []; - } -} - -function _initTimeData(urlTimeList) { - const timeUrls = $.parseJSON(urlTimeList); - for (let i = 0; i < timeUrls.length; i++) { - timeData = timeData.concat(HLPR_readJSONfromFile(timeUrls[i])); - } - MAX_TIME = timeData.length - 1; -} - -function _updateSpeedSliderValue(stepsPerTick) { - let s; - if (stepsPerTick >= 1) { - s = stepsPerTick.toFixed(0); - } else { - s = "1/" + (1 / stepsPerTick).toFixed(0); - } - $("#slider-value").html(s); -} - -function _initSliders() { - const maxAllowedTimeStep = Math.ceil(MAX_TIME / ACTIVITY_FRAMES_IN_TIME_LINE_AT_MAX_SPEED); - // after being converted to the exponential range maxSpeed must not exceed maxAllowedTimeStep - const maxSpeedSlider = Math.min(10, 5 + Math.log(maxAllowedTimeStep) / Math.LN2); - - if (timeData.length > 0) { - $("#sliderStep").slider({ - min: 0, max: maxSpeedSlider, step: 1, value: 5, - stop: function () { - refreshCurrentDataSlice(); - sliderSel = false; - }, - slide: function (event, target) { - // convert the linear 0..10 range to the exponential 1/32..1..32 range - const newStep = Math.pow(2, target.value - 5); - setTimeStep(newStep); - _updateSpeedSliderValue(timeStepsPerTick); - sliderSel = true; - } - }); - // Initialize slider for timeLine - $("#slider").slider({ - min: 0, max: MAX_TIME, - slide: function (event, target) { - sliderSel = true; 
- currentTimeValue = target.value; - $('#TimeNow').val(currentTimeValue); - }, - change: function (event, ui) { - triggered_by_timeselection = false; - tsView.timeselection_move_fn(); - triggered_by_timeselection = true; - }, - stop: function (event, target) { - sliderSel = false; - loadFromTimeStep(target.value); - } - }); - } else { - $("#divForSliderSpeed").hide(); - } - _updateSpeedSliderValue(timeStepsPerTick); - - $('#TimeNow').click(function () { - if (!AG_isStopped) { - pauseMovie(); - } - $(this).select(); - }).change(function (ev) { - let val = parseFloat(ev.target.value); - if (val === null || val < 0 || val > MAX_TIME) { - val = 0; - ev.target.value = 0; - } - $('#slider').slider('value', val); - loadFromTimeStep(val); - }); -} - -function _initChannelSelection(selectionGID) { - const vs_regionsSelector = TVBUI.regionSelector("#channelSelector", {filterGid: selectionGID}); - - vs_regionsSelector.change(function (value) { - VS_selectedRegions = []; - for (let i = 0; i < value.length; i++) { - VS_selectedRegions.push(parseInt(value[i], 10)); - } - }); - //sync region filter with initial selection - VS_selectedRegions = []; - const selection = vs_regionsSelector.val(); - for (let i = 0; i < selection.length; i++) { - VS_selectedRegions.push(parseInt(selection[i], 10)); - } - const mode_selector = TVBUI.modeAndStateSelector("#channelSelector", 0); - mode_selector.modeChanged(VS_changeMode); - mode_selector.stateVariableChanged(VS_changeStateVariable); -} - -////////////////////////////////////////// GL Initializations ////////////////////////////////////////// - -function customInitGL(canvas) { - window.onresize = function () { - updateGLCanvasSize(BRAIN_CANVAS_ID); - LEG_updateLegendVerticesBuffers(); - }; - initGL(canvas); - drawingMode = gl.TRIANGLES; - gl.newCanvasWidth = canvas.clientWidth; - gl.newCanvasHeight = canvas.clientHeight; - canvas.redrawFunctionRef = drawScene; // interface-like function used in HiRes image exporting - 
canvas.multipleImageExport = VS_multipleImageExport; - - gl.clearDepth(1.0); - gl.enable(gl.DEPTH_TEST); - gl.depthFunc(gl.LEQUAL); -} - -/** This callback handles image exporting from this canvas.*/ -function VS_multipleImageExport(saveFigure) { - const canvas = this; - - function saveFrontBack(nameFront, nameBack) { - mvPushMatrix(); - // front - canvas.drawForImageExport(); - saveFigure({suggestedName: nameFront}); - // back: rotate model around the vertical y axis in trackball space (almost camera space: camera has a z translation) - const r = createRotationMatrix(180, [0, 1, 0]); - GL_mvMatrix = GL_cameraMatrix.x(r.x(GL_trackBallMatrix)); - canvas.drawForImageExport(); - saveFigure({suggestedName: nameBack}); - mvPopMatrix(); - } - - // using drawForImageExport because it handles resizing canvas for export - // It is set on canvas in initGL and defers to drawscene. - - if (VS_hemisphere_chunk_mask !== null) { // we have 2 hemispheres - if (VS_hemisphereVisibility === null) { // both are visible => take them apart when taking picture - VS_SetHemisphere('l'); - saveFrontBack('brain-LH-front', 'brain-LH-back'); - VS_SetHemisphere('r'); - saveFrontBack('brain-RH-front', 'brain-RH-back'); - VS_SetHemisphere(VS_hemisphereVisibility); - } else if (VS_hemisphereVisibility === 'l') { // LH is visible => take picture of it only - saveFrontBack('brain-LH-front', 'brain-LH-back'); - } else if (VS_hemisphereVisibility === 'r') { - saveFrontBack('brain-RH-front', 'brain-RH-back'); - } - } else { - // just save front-back view if no hemispheres - saveFrontBack('brain-front', 'brain-back'); - } -} - -function initShaders() { - createAndUseShader("shader-fs", "shader-vs"); - if (isOneToOneMapping) { - SHADING_Context.one_to_one_program_init(GL_shaderProgram); - } else { - SHADING_Context.region_progam_init(GL_shaderProgram, NO_OF_MEASURE_POINTS, legendGranularity); - } -} - -///////////////////////////////////////~~~~~~~~START MOUSE RELATED 
CODE~~~~~~~~~~~////////////////////////////////// - - -function _onContextMenu() { - if (!displayMeasureNodes || VS_pickedIndex === -1) { - return false; - } - doPick = true; - drawScene(); - $('#nodeNameId').text(measurePointsLabels[VS_pickedIndex]); - $('#contextMenuDiv').css('left', event.offsetX).css('top', event.offsetY).show(); - return false; -} - -var doPick = false; - -function customMouseDown(event) { - GL_handleMouseDown(event, $("#" + BRAIN_CANVAS_ID)); - $('#contextMenuDiv').hide(); - VB_BrainNavigator.temporaryDisableInTimeRefresh(); - if (displayMeasureNodes) { - doPick = true; - } -} - -function customMouseUp(event) { - GL_handleMouseUp(event); - VB_BrainNavigator.endTemporaryDisableInTimeRefresh(); -} - -/////////////////////////////////////////~~~~~~~~END MOUSE RELATED CODE~~~~~~~~~~~////////////////////////////////// - - -////////////////////////////////////////~~~~~~~~~ WEB GL RELATED RENDERING ~~~~~~~///////////////////////////////// -/** - * Update colors for all Positions on the brain. 
- */ - -function updateColors(currentTimeInFrame) { - const col = ColSchInfo(); - const activityRange = ColSchGetBounds(); - SHADING_Context.colorscheme_set_uniforms(GL_shaderProgram, activityRange.min, activityRange.max, - activityRange.bins, activityRange.centralHoleDiameter); - - if (isOneToOneMapping) { - for (let i = 0; i < brainBuffers.length; i++) { - const activity = new Float32Array(activitiesData[i][currentTimeInFrame]); - gl.bindBuffer(gl.ARRAY_BUFFER, brainBuffers[i][3]); - gl.bufferData(gl.ARRAY_BUFFER, activity, gl.STATIC_DRAW); - gl.uniform1f(GL_shaderProgram.colorSchemeUniform, col.tex_v); - } - } else { - const currentActivity = activitiesData[currentTimeInFrame]; - for (let ii = 0; ii < NO_OF_MEASURE_POINTS; ii++) { - if (VS_selectedRegions.indexOf(ii) !== -1) { - gl.uniform2f(GL_shaderProgram.activityUniform[ii], currentActivity[ii], col.tex_v); - } else { - gl.uniform2f(GL_shaderProgram.activityUniform[ii], currentActivity[ii], col.muted_tex_v); - } - } - // default color for a measure point - gl.uniform2f(GL_shaderProgram.activityUniform[NO_OF_MEASURE_POINTS], activityMin, col.measurePoints_tex_v); - // color used for a picked measure point - gl.uniform2f(GL_shaderProgram.activityUniform[NO_OF_MEASURE_POINTS + 1], activityMax, col.measurePoints_tex_v); - } -} - -function toggleMeasureNodes() { - displayMeasureNodes = !displayMeasureNodes; -} - - -function switchFaceObject() { - isFaceToDisplay = !isFaceToDisplay; -} - -/** - * Draw model with filled Triangles of isolated Points (Vertices). - */ -function wireFrame() { - if (drawingMode === gl.POINTS) { - drawingMode = gl.TRIANGLES; - } else { - drawingMode = gl.POINTS; - } -} - -/** - * Sets a new movie speed. - * To stop the movie set AG_isStopped to true rather than passing 0 here. 
- */ -function setTimeStep(newTimeStepsPerTick) { - timeStepsPerTick = newTimeStepsPerTick; - if (timeStepsPerTick < 1) { // subunit speed - TIME_STEP = 1; - } else { - TIME_STEP = Math.floor(timeStepsPerTick); - } -} - -function resetSpeedSlider() { - setTimeStep(1); - $("#sliderStep").slider("option", "value", 1); - refreshCurrentDataSlice(); -} - -function setNavigatorVisibility(enable) { - drawNavigator = enable; -} - -function toggleDrawTriangleLines() { - drawTriangleLines = !drawTriangleLines; -} - -function toggleDrawBoundaries() { - regionBoundariesController.toggleBoundariesVisibility(); -} - -function setSpecularHighLights(enable) { - drawSpeculars = enable; -} - -/** - * Creates a list of webGl buffers. - * - * @param dataList a list of lists. Each list will contain the data needed for creating a gl buffer. - */ -function createWebGlBuffers(dataList) { - const result = []; - for (let i = 0; i < dataList.length; i++) { - const buffer = gl.createBuffer(); - gl.bindBuffer(gl.ARRAY_BUFFER, buffer); - gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(dataList[i]), gl.STATIC_DRAW); - buffer.numItems = dataList[i].length; - result.push(buffer); - } - - return result; -} - -/** - * Read data from the specified urls. - * - * @param data_url_list a list of urls from where it should read the data - * @param staticFiles true if the urls points to some static files - */ -function readFloatData(data_url_list, staticFiles) { - const result = []; - for (let i = 0; i < data_url_list.length; i++) { - let data_json = HLPR_readJSONfromFile(data_url_list[i], staticFiles); - if (staticFiles) { - for (let j = 0; j < data_json.length; j++) { - data_json[j] = parseFloat(data_json[j]); - } - } - result.push(data_json); - data_json = null; - } - return result; -} - -/** - * Computes the data for alpha and alphasIndices. - * - * @param vertices a list which contains lists of vertices. 
E.g.: [[slice_1_vertices],...,[slice_n_vertices]] - * @param measurePoints a list which contains all the measure points. E.g.: [[x0,y0,z0],[x1,y1,z1],...] - */ -function computeVertexRegionMap(vertices, measurePoints) { - const vertexRegionMap = []; - for (let i = 0; i < vertices.length; i++) { - const reg = []; - for (let j = 0; j < vertices[i].length / 3; j++) { - const currentVertex = vertices[i].slice(j * 3, (j + 1) * 3); - const closestPosition = NAV_BrainNavigator.findClosestPosition(currentVertex, measurePoints); - reg.push(closestPosition); - } - vertexRegionMap.push(reg); - } - return vertexRegionMap; -} - - -/** - * Method used for creating a color buffer for a cube (measure point). - * - * @param isPicked If true then the color used will be - * the one used for drawing the measure points for which the - * corresponding eeg channels are selected. - */ -function createColorBufferForCube(isPicked) { - let pointColor = []; - if (isOneToOneMapping) { - pointColor = [0.34, 0.95, 0.37, 1.0]; - if (isPicked) { - pointColor = [0.99, 0.99, 0.0, 1.0]; - } - } else { - pointColor = [NO_OF_MEASURE_POINTS]; - if (isPicked) { - pointColor = [NO_OF_MEASURE_POINTS + 1]; - } - } - let colors = []; - for (let i = 0; i < 24; i++) { - colors = colors.concat(pointColor); - } - const cubeColorBuffer = gl.createBuffer(); - gl.bindBuffer(gl.ARRAY_BUFFER, cubeColorBuffer); - gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(colors), gl.STATIC_DRAW); - return cubeColorBuffer; -} - -function initBuffers(urlVertices, urlNormals, urlTriangles, urlRegionMap, staticFiles) { - const verticesData = readFloatData(urlVertices, staticFiles); - const vertexBatches = createWebGlBuffers(verticesData); - const normals = HLPR_getDataBuffers(gl, urlNormals, staticFiles); - const indexes = HLPR_getDataBuffers(gl, urlTriangles, staticFiles, true); - - let vertexRegionMap; - if (!isOneToOneMapping) { - if (urlRegionMap && urlRegionMap.length) { - vertexRegionMap = HLPR_getDataBuffers(gl, 
urlRegionMap); - } else if (isEEGView) { - // if is eeg view than we use the static surface 'eeg_skin_surface' and we have to compute the vertexRegionMap; - // todo: do this on the server to eliminate this special case - const regionData = computeVertexRegionMap(verticesData, measurePoints); - vertexRegionMap = createWebGlBuffers(regionData); - } else { - // Fake buffers, copy of the normals, in case of transparency, we only need dummy ones. - vertexRegionMap = normals; - } - } - - const result = []; - for (let i = 0; i < vertexBatches.length; i++) { - if (isOneToOneMapping) { - const activityBuffer = gl.createBuffer(); - gl.bindBuffer(gl.ARRAY_BUFFER, activityBuffer); - gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertexBatches[i].numItems), gl.STATIC_DRAW); - result.push([vertexBatches[i], normals[i], indexes[i], activityBuffer]); - } else { - result.push([vertexBatches[i], normals[i], indexes[i], vertexRegionMap[i]]); - } - } - return result; -} - -/** - * Make a draw call towards the GL_shaderProgram compiled from common/vertex_shader common_fragment_shader - * Note: all attributes have to be bound even if the shader does not explicitly use them (ex picking mode) - * @param drawMode Triangles / Points - * @param buffers Buffers to be drawn. 
Array of (vertices, normals, triangles, colors) for one to one mappings - * Array of (vertices, normals, triangles, alphas, alphaindices) for region based drawing - */ -function drawBuffer(drawMode, buffers) { - setMatrixUniforms(); - if (isOneToOneMapping) { - SHADING_Context.one_to_one_program_draw(GL_shaderProgram, buffers[0], buffers[1], buffers[3], buffers[2], drawMode); - } else if (isDrawingSpheres) { - SHADING_Context.region_program_draw(GL_shaderProgram, buffers[0], buffers[1], buffers[0], buffers[2], drawMode); - } else { - SHADING_Context.region_program_draw(GL_shaderProgram, buffers[0], buffers[1], buffers[3], buffers[2], drawMode); - } -} - -/** - * - * @param drawMode Triangles / Points - * @param buffersSets Actual buffers to be drawn. Array or (vertices, normals, triangles) - * @param [bufferSetsMask] Optional. If this array has a 0 at index i then the buffer at index i is not drawn - * @param [useBlending] When true, the object is drawn with blending (for transparency) - * @param [cullFace] When gl.FRONT, it will mark current object to be drown twice (another with gl.BACK). - * It should be set to GL.FRONT for objects transparent and convex. 
- */ -function drawBuffers(drawMode, buffersSets, bufferSetsMask, useBlending, cullFace) { - let lightSettings = null; - if (useBlending) { - lightSettings = setLighting(blendingLightSettings); - gl.enable(gl.BLEND); - gl.blendEquationSeparate(gl.FUNC_ADD, gl.FUNC_ADD); - gl.blendFuncSeparate(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA, gl.ONE, gl.ONE_MINUS_SRC_ALPHA); - // Blending function for alpha: transparent pix blended over opaque -> opaque pix - if (cullFace) { - gl.enable(gl.CULL_FACE); - if (withTransparency) { - gl.uniform1f(GL_shaderProgram.alphaUniform, _alphaValue); - } - else { - gl.uniform1f(GL_shaderProgram.alphaUniform, 1); - } - gl.cullFace(cullFace); - } - } - - for (let i = 0; i < buffersSets.length; i++) { - if (bufferSetsMask !== null && bufferSetsMask !== undefined && !bufferSetsMask[i]) { - continue; - } - - //display spheres with full alpha value - if (isDrawingSpheres) { - gl.uniform1i(GL_shaderProgram.useVertexColors, false); - gl.uniform1f(GL_shaderProgram.alphaUniform, 1); - - // set sphere color green for the selected channels ones and yellow for the others - if (VS_selectedchannels.includes(i)) { - gl.uniform4f(GL_shaderProgram.materialColor, 0.99, 0.99, 0.0, 1.0); - drawBuffer(drawMode, buffersSets[i]); - } - else { - gl.uniform4f(GL_shaderProgram.materialColor, 0.34, 0.95, 0.37, 1.0); - drawBuffer(drawMode, buffersSets[i]); - } - gl.uniform1i(GL_shaderProgram.useVertexColors, true); - - if (withTransparency) { - gl.uniform1f(GL_shaderProgram.alphaUniform, _alphaValue); - } - } - else { - drawBuffer(drawMode, buffersSets[i]); - } - - } - - if (useBlending) { - gl.disable(gl.CULL_FACE); - setLighting(lightSettings); - // Draw the same transparent object the second time - if (cullFace === gl.FRONT) { - drawBuffers(drawMode, buffersSets, bufferSetsMask, useBlending, gl.BACK); - } - } -} - - -function drawBrainLines(linesBuffers, brainBuffers, bufferSetsMask) { - let lightSettings = null; - if (drawingMode !== gl.POINTS) { - // Usually draw 
the wire-frame with the same color. But in points mode draw with the vertex colors. - lightSettings = setLighting(linesLightSettings); - } - gl.lineWidth(1.0); - // we want all the brain buffers in this set except the element array buffer (at index 2) - let bufferSets = []; - for (let c = 0; c < brainBuffers.length; c++) { - let chunk = brainBuffers[c].slice(); - chunk[2] = linesBuffers[c]; - bufferSets.push(chunk); - } - drawBuffers(gl.LINES, bufferSets, bufferSetsMask); - if (drawingMode !== gl.POINTS) { - setLighting(lightSettings); - } -} - -/** - * Actual scene drawing step. - */ -function tick() { - - if (sliderSel) { - return; - } - - //// Update activity buffers to be drawn at next step - // If we are in the middle of waiting for the next data file just - // stop and wait since we might have an index that is 'out' of this data slice - if (!AG_isStopped) { - // Synchronizes display time with movie time - let shouldStep = false; - if (timeStepsPerTick >= 1) { - shouldStep = true; - } else if (elapsedTicksPerTimeStep >= (1 / timeStepsPerTick)) { - shouldStep = true; - elapsedTicksPerTimeStep = 0; - } else { - elapsedTicksPerTimeStep += 1; - } - - if (shouldStep && shouldIncrementTime) { - currentTimeValue = currentTimeValue + TIME_STEP; - } - - if (currentTimeValue > MAX_TIME) { - // Next time value is no longer in activity data. 
- initActivityData(); - if (isDoubleView) { - loadEEGChartFromTimeStep(0); - drawGraph(false, 0); - } - shouldStep = false; - } - - if (shouldStep) { - if (shouldLoadNextActivitiesFile()) { - loadNextActivitiesFile(); - } - if (shouldChangeCurrentActivitiesFile()) { - changeCurrentActivitiesFile(); - } - if (isDoubleView) { - drawGraph(true, TIME_STEP); - } - } - } - - const currentTimeInFrame = Math.floor((currentTimeValue - totalPassedActivitiesData) / TIME_STEP); - updateColors(currentTimeInFrame); - - //update energy - if(timeselection_interval!=0 && !AG_isStopped){ - - changeCubicalMeasurePoints_energy(); - } - drawScene(); - - /// Update FPS and Movie timeline - if (!isPreview) { - const timeNow = new Date().getTime(); - const elapsed = timeNow - lastTime; - - if (lastTime !== 0) { - framestime.shift(); - framestime.push(elapsed); - } - - lastTime = timeNow; - if (timeData.length > 0 && !AG_isStopped) { - document.getElementById("TimeNow").value = toSignificantDigits(timeData[currentTimeValue], 2); - } - let meanFrameTime = 0; - for (let i = 0; i < framestime.length; i++) { - meanFrameTime += framestime[i]; - } - meanFrameTime = meanFrameTime / framestime.length; - document.getElementById("FramesPerSecond").innerHTML = Math.floor(1000 / meanFrameTime).toFixed(); - if (!sliderSel && !AG_isStopped) { - $("#slider").slider("option", "value", currentTimeValue); - } - } -} - -/** - * Draw from buffers. 
- */ -function drawScene() { - - const theme = ColSchGetTheme().surfaceViewer; - gl.clearColor(theme.backgroundColor[0], theme.backgroundColor[1], theme.backgroundColor[2], theme.backgroundColor[3]); - gl.viewport(0, 0, gl.viewportWidth, gl.viewportHeight); - - // Draw sections before setting the correct draw perspective, to work with "rel-time refresh of sections" - VB_BrainNavigator.maybeRefreshSections(); - - // View angle is 45, we want to see object from near up to 800 distance from camera - perspective(45, gl.viewportWidth / gl.viewportHeight, near, 800.0); - - mvPushMatrix(); - mvRotate(180, [0, 0, 1]); - - if (!doPick) { - gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT); - - if (drawSpeculars) { - setLighting(specularLightSettings); - } else { - setLighting(); - } - - if (VS_showLegend) { - mvPushMatrix(); - loadIdentity(); - drawBuffers(gl.TRIANGLES, [LEG_legendBuffers]); - mvPopMatrix(); - } - - if (isInternalSensorView) { - gl.uniform1f(GL_shaderProgram.alphaUniform, 1); - drawBuffers(gl.TRIANGLES, measurePointsBuffers); - } else { - //draw the nodes first to make it appear - if (displayMeasureNodes) { - isDrawingSpheres = true; - drawBuffers(gl.TRIANGLES, measurePointsBuffers); - isDrawingSpheres = false; - } - // draw surface - drawBuffers(drawingMode, brainBuffers, bufferSetsMask); - - regionBoundariesController.drawRegionBoundaries(drawingMode, brainBuffers); - - if (drawTriangleLines) { - drawBrainLines(brainLinesBuffers, brainBuffers, bufferSetsMask); - } - } - - if (isFaceToDisplay) { - const faceDrawMode = isInternalSensorView ? 
drawingMode : gl.TRIANGLES; - mvPushMatrix(); - mvTranslate(VB_BrainNavigator.getPosition()); - drawBuffers(faceDrawMode, shelfBuffers, null, true, gl.FRONT); - mvPopMatrix(); - } - - if (drawNavigator) { - VB_BrainNavigator.drawNavigator(); - } - - } else { - gl.bindFramebuffer(gl.FRAMEBUFFER, GL_colorPickerBuffer); - gl.disable(gl.DITHER); - gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT); - setLighting(pickingLightSettings); - - if (GL_colorPickerInitColors.length === 0) { - GL_initColorPickingData(NO_OF_MEASURE_POINTS); - } - - isDrawingSpheres = true; - for (let i = 0; i < NO_OF_MEASURE_POINTS; i++) { - const mpColor = GL_colorPickerInitColors[i]; - gl.uniform4fv(GL_shaderProgram.materialColor, mpColor); - drawBuffer(gl.TRIANGLES, measurePointsBuffers[i]); - } - isDrawingSpheres = false; - VS_pickedIndex = GL_getPickedIndex(); - //display the channel name - if (VS_pickedIndex != -1) { - displayMessage("The highlighted node is " + measurePointsLabels[VS_pickedIndex], "infoMessage") - } - doPick = false; - gl.bindFramebuffer(gl.FRAMEBUFFER, null); - } - - mvPopMatrix(); -} - -////////////////////////////////////////~~~~~~~~~ END WEB GL RELATED RENDERING ~~~~~~~///////////////////////////////// - - -/////////////////////////////////////// ~~~~~~~~~~ DATA RELATED METHOD ~~~~~~~~~~~~~ ////////////////////////////////// - -/** - * Change the currently selected state variable. Get the newly selected value, reset the currentTimeValue to start - * and read the first page of the new mode/state var combination. - */ -function VS_changeStateVariable(id, val) { - selectedStateVar = val; - $("#slider").slider("option", "value", currentTimeValue); - initActivityData(); -} - -/** - * Change the currently selected mode. Get the newly selected value, reset the currentTimeValue to start - * and read the first page of the new mode/state var combination. 
- */ -function VS_changeMode(id, val) { - selectedMode = val; - $("#slider").slider("option", "value", currentTimeValue); - initActivityData(); -} - -/** - * Just read the first slice of activity data and set the time step to 0. - */ -function initActivityData() { - currentTimeValue = 0; - //read the first file - const initUrl = getUrlForPageFromIndex(0); - activitiesData = HLPR_readJSONfromFile(initUrl); - if (activitiesData !== null && activitiesData !== undefined) { - currentActivitiesFileLength = activitiesData.length * TIME_STEP; - totalPassedActivitiesData = 0; - } -} - -/** - * Load the brainviewer from this given time step. - */ -function loadFromTimeStep(step) { - showBlockerOverlay(50000); - if (step % TIME_STEP !== 0) { - step = step - step % TIME_STEP + TIME_STEP; // Set time to be multiple of step - } - const nextUrl = getUrlForPageFromIndex(step); - currentAsyncCall = null; - readFileData(nextUrl, false); - currentTimeValue = step; - activitiesData = nextActivitiesFileData.slice(0); - nextActivitiesFileData = null; - currentActivitiesFileLength = activitiesData.length * TIME_STEP; - totalPassedActivitiesData = currentTimeValue; - // Also sync eeg monitor if in double view - if (isDoubleView) { - loadEEGChartFromTimeStep(step); - } - closeBlockerOverlay(); -} - -/** - * Refresh the current data with the new time step. 
- */ -function refreshCurrentDataSlice() { - if (currentTimeValue % TIME_STEP !== 0) { - currentTimeValue = currentTimeValue - currentTimeValue % TIME_STEP + TIME_STEP; // Set time to be multiple of step - } - loadFromTimeStep(currentTimeValue); -} - -/** - * Generate the url that reads one page of data starting from @param index - */ -function getUrlForPageFromIndex(index) { - let fromIdx = index; - if (fromIdx > MAX_TIME) { - fromIdx = 0; - } - const toIdx = fromIdx + pageSize * TIME_STEP; - return readDataSplitPageURL(urlBase, fromIdx, toIdx, selectedStateVar, selectedMode, TIME_STEP); -} - -/** - * If we are at the last NEXT_PAGE_THRESHOLD points of data we should start loading the next data file - * to get an animation as smooth as possible. - */ -function shouldLoadNextActivitiesFile() { - - if (!isPreview && (currentAsyncCall === null) && ((currentTimeValue - totalPassedActivitiesData + NEXT_PAGE_THREASHOLD * TIME_STEP) >= currentActivitiesFileLength)) { - if (nextActivitiesFileData === null || nextActivitiesFileData.length === 0) { - return true; - } - } - return false; -} - -/** - * Start a new async call that should load required data for the next activity slice. - */ -function loadNextActivitiesFile() { - const nextFileIndex = totalPassedActivitiesData + currentActivitiesFileLength; - const nextUrl = getUrlForPageFromIndex(nextFileIndex); - const asyncCallId = new Date().getTime(); - currentAsyncCall = asyncCallId; - readFileData(nextUrl, true, asyncCallId); -} - -/** - * If the next time value is bigger that the length of the current activity loaded data - * that means it's time to switch to the next activity data slice. - */ -function shouldChangeCurrentActivitiesFile() { - return ((currentTimeValue + TIME_STEP - totalPassedActivitiesData) >= currentActivitiesFileLength); -} - -/** - * We've reached the end of the current activity chunk. Time to switch to - * the next one. 
- */ -function changeCurrentActivitiesFile() { - if (nextActivitiesFileData === null || !nextActivitiesFileData.length) { - // Async data call was not finished, stop incrementing call and wait for data. - shouldIncrementTime = false; - return; - } - - activitiesData = nextActivitiesFileData.slice(0); - nextActivitiesFileData = null; - totalPassedActivitiesData = totalPassedActivitiesData + currentActivitiesFileLength; - currentActivitiesFileLength = activitiesData.length * TIME_STEP; - currentAsyncCall = null; - if (activitiesData && activitiesData.length) { - shouldIncrementTime = true; - } - if (totalPassedActivitiesData >= MAX_TIME) { - totalPassedActivitiesData = 0; - } -} - - -function readFileData(fileUrl, async, callIdentifier) { - nextActivitiesFileData = null; - // Keep a call identifier so we don't "intersect" async calls when two - // async calls are started before the first one finishes. - const self = this; - self.callIdentifier = callIdentifier; - doAjaxCall({ - url: fileUrl, - async: async, - success: function (data) { - if ((self.callIdentifier === currentAsyncCall) || !async) { - nextActivitiesFileData = eval(data); - data = null; - } - } - }); -} - - -/////////////////////////////////////// ~~~~~~~~~~ END DATA RELATED METHOD ~~~~~~~~~~~~~ ////////////////////////////////// -/////////////////////////////////////// ~~~~~~~~~~ START ENERGY RELATED METHOD ~~~~~~~~~~~~~ ////////////////////////////////// -//init spheres with energy controlling the radius -function changeCubicalMeasurePoints_energy() { - selectedchannels=tsView.channels(); - for (let i = 0; i < selectedchannels.length; i++) { - // generate spheres - const result = HLPR_sphereBufferAtPoint(gl, measurePoints[selectedchannels[i]],timeselection_energy[i][currentTimeValue]); - const bufferVertices = result[0]; - const bufferNormals = result[1]; - const bufferTriangles = result[2]; - const bufferColor = createColorBufferForCube(false); - measurePointsBuffers[i] = [bufferVertices, 
bufferNormals, bufferTriangles, bufferColor]; - } -} -/////////////////////////////////////// ~~~~~~~~~~ END ENERGY RELATED METHOD ~~~~~~~~~~~~~ ////////////////////////////////// \ No newline at end of file From 3f0ac7149e3467014a4c5078f6d458ee7103d755 Mon Sep 17 00:00:00 2001 From: kimonoki Date: Wed, 1 Aug 2018 14:41:06 +1000 Subject: [PATCH 28/53] TVB-2379 Merge opacity shaders Update icon for the paused pages Renew the animated_graph.js --- tvb/interfaces/web/static/js/tvbviz.js | 1 + .../genshi/visualizers/brain/view.html | 2 +- .../commons/shading/vertex_region.glsl | 4 ++- .../shading/vertex_region_opacity.glsl | 26 ------------------- .../new_dual_brain/dual_brain_3d_view.html | 2 +- .../new_dual_brain/vertex_shader_opacity.html | 4 --- 6 files changed, 6 insertions(+), 33 deletions(-) delete mode 100644 tvb/interfaces/web/templates/genshi/visualizers/commons/shading/vertex_region_opacity.glsl delete mode 100644 tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/vertex_shader_opacity.html diff --git a/tvb/interfaces/web/static/js/tvbviz.js b/tvb/interfaces/web/static/js/tvbviz.js index a30cb9b6a..94ee24076 100644 --- a/tvb/interfaces/web/static/js/tvbviz.js +++ b/tvb/interfaces/web/static/js/tvbviz.js @@ -1148,6 +1148,7 @@ tv.plot = { all_slice[0].di=f.shape()[1]; all_slice[0].hi=f.shape()[0]; all_slice[0].lo=0; + //call the energy computation method tv.util.get_time_selection_energy(f.baseURL(), all_slice, f.energy_callback, f.channels(), f.mode(), f.state_var(), timeselection_interval_length); //update the time in the input tag diff --git a/tvb/interfaces/web/templates/genshi/visualizers/brain/view.html b/tvb/interfaces/web/templates/genshi/visualizers/brain/view.html index a216f2cf7..b23f8beab 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/brain/view.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/brain/view.html @@ -16,6 +16,6 @@ '${urlRegionBoundaries}', '${measurePointsSelectionGID}'); }); - +
    \ No newline at end of file diff --git a/tvb/interfaces/web/templates/genshi/visualizers/commons/shading/vertex_region.glsl b/tvb/interfaces/web/templates/genshi/visualizers/commons/shading/vertex_region.glsl index 7d332809c..bf879bfbf 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/commons/shading/vertex_region.glsl +++ b/tvb/interfaces/web/templates/genshi/visualizers/commons/shading/vertex_region.glsl @@ -10,6 +10,8 @@ attribute vec3 aVertexNormal; attribute float aVertexRegion; // 127 is the legend granularity uniform vec2 uActivity[${abs(noOfMeasurePoints) + 2} + 127]; +//for surface transparency +uniform float uAlpha; varying vec4 vColor; varying vec3 posInterp; @@ -19,6 +21,6 @@ void main(void) { transformed_pos(aVertexPosition, aVertexNormal, gl_Position, posInterp, normInterp); vec2 uv = uActivity[int(aVertexRegion)]; - vColor = colorSchemeLookup(uv); + vColor.a = uAlpha; } diff --git a/tvb/interfaces/web/templates/genshi/visualizers/commons/shading/vertex_region_opacity.glsl b/tvb/interfaces/web/templates/genshi/visualizers/commons/shading/vertex_region_opacity.glsl deleted file mode 100644 index bf879bfbf..000000000 --- a/tvb/interfaces/web/templates/genshi/visualizers/commons/shading/vertex_region_opacity.glsl +++ /dev/null @@ -1,26 +0,0 @@ -{% include transform.glsl %} -{% include colorscheme.glsl %} -/** - * This shader displays region level activity. The activity is stored in the uniform array. - * aVertexRegion is the mapping from vertices to region indices. 
- */ -attribute vec3 aVertexPosition; -attribute vec3 aVertexNormal; - -attribute float aVertexRegion; -// 127 is the legend granularity -uniform vec2 uActivity[${abs(noOfMeasurePoints) + 2} + 127]; -//for surface transparency -uniform float uAlpha; - -varying vec4 vColor; -varying vec3 posInterp; -varying vec3 normInterp; - -void main(void) { - transformed_pos(aVertexPosition, aVertexNormal, gl_Position, posInterp, normInterp); - - vec2 uv = uActivity[int(aVertexRegion)]; - vColor = colorSchemeLookup(uv); - vColor.a = uAlpha; -} diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_view.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_view.html index 75d06e921..de2e1fa59 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_view.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_view.html @@ -3,7 +3,7 @@ - + \ No newline at end of file From cb8cc03cabec9ba0f297d58cc46c3f8205c95d56 Mon Sep 17 00:00:00 2001 From: kimonoki Date: Wed, 1 Aug 2018 14:52:48 +1000 Subject: [PATCH 29/53] TVB-2379 Add animated_graph.js --- .../new_dual_brain/scripts/animated_graph.js | 1129 +++++++++++++++++ 1 file changed, 1129 insertions(+) create mode 100644 tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/animated_graph.js diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/animated_graph.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/animated_graph.js new file mode 100644 index 000000000..50cf15b12 --- /dev/null +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/animated_graph.js @@ -0,0 +1,1129 @@ +/** + * TheVirtualBrain-Framework Package. This package holds all Data Management, and + * Web-UI helpful to run brain-simulations. To use it, you also need do download + * TheVirtualBrain-Scientific Package (for simulators). 
See content of the + * documentation-folder for more details. See also http://www.thevirtualbrain.org + * + * (c) 2012-2017, Baycrest Centre for Geriatric Care ("Baycrest") and others + * + * This program is free software: you can redistribute it and/or modify it under the + * terms of the GNU General Public License as published by the Free Software Foundation, + * either version 3 of the License, or (at your option) any later version. + * This program is distributed in the hope that it will be useful, but WITHOUT ANY + * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A + * PARTICULAR PURPOSE. See the GNU General Public License for more details. + * You should have received a copy of the GNU General Public License along with this + * program. If not, see . + * + **/ + +/* globals doAjaxCall, readDataPageURL, HLPR_readJSONfromFile */ + +// //it contains all the points that have to be/have been displayed (it contains all the points from the read file); +// //it is an array of arrays (each array contains the points for a certain line chart) +var AG_allPoints = []; +// it supplies the labels for x axis (time in milliseconds) +var AG_time = []; +//it is used for clearing timing events (the event that calls the drawGraph method after a specified time-interval) +var t = null; +//how many elements will be visible on the screen +//computed on the server +var AG_numberOfVisiblePoints = 0; +//all the points that are visible on the screen at a certain moment; the points are read from the AG_allPoints array +//and are translated with a value equal to [AG_translationStep * (AG_noOfLines - the index of the current line)] +//THE FORM of this matrix is: [ [[t1, a1], [t2, a2], ...], [[t1, b1], [t2, b2], ...], ..., [[t1, n1], [t2, n2], ...]] +// t1, t2, ... - represents time that is visible on the screen at a certain moment; +// a1, a2,... 
- represents the translated values +var AG_displayedPoints = []; +//All the times values that are displayed at a certain moment. To be used by the vertical time line. +var AG_displayedTimes = []; +//the last element that was displayed on the screen is located at this index; the index refers to AG_allPoints array +var AG_currentIndex = 0; +//this var should be set to the length of the AG_allPoints array +var AG_noOfLines = 0; +// the step used for translating the drawn line charts; we translate the drawn line charts because we don't want them to overlap +// the lines will be translated with AG_translationStep * AG_computedStep +var AG_translationStep = 1; +// a scaling factor for the displayed signal +var AG_scaling = 1; +// this var is computed on the server. It is used for line translation (AG_translationStep * AG_computedStep). +var AG_computedStep = 50; +//The normalization steps for each of the channels, in order to bring them centered near the channel bar +var AG_normalizationSteps = []; +//If the animation is paused using pause/start button +var AG_isStopped = false; +//If animation speed is set at a 0 value +var AG_isSpeedZero = false; +//the number of points that are shifted/unshift at a moment +var noOfShiftedPoints = 1; +// List of channels that will be submited on a change of the displayed channels +var AG_submitableSelectedChannels = []; +// contains the indexes of the channels that are displayed +var displayedChannels = []; +// a list of urls pointing to the files from where we should read the time +var timeSetUrls = []; +//a list containing the number of channel in each file specified in 'dataSetUrls' fields +var noOfChannelsPerSet = []; +// the number of points from the longest channel +var maxChannelLength = 0; +// the maximum number of data files from all the submited datatypes +var maxDataFileIndex = 0; +// represents the file index from the dataset that is displayed in the chart +var currentDataFileIndex = 0; +// contains the parsed data for the 
next file from the dataset +var nextData = []; +// contains the time for the next file from the dataset +var nextTimeData = []; +// true only if the next file from dataset was loaded into memory +var isNextDataLoaded = false; +// true only if the next time data was loaded into memory +var isNextTimeDataLoaded = false; +// true only if the the process of loading a file is started +var AG_isLoadStarted = false; +// this is the number of steps left before updating the next file +var threshold = 10; +// the amount of data that has passed +var totalPassedData = 0; +// the number of channels +var totalNumberOfChannels = 0; +// true only if any of the displayed channels contains NaN values +var nanValueFound = false; +//Channel prefix for each array of data +var channelPrefix = "Channel: "; +// +var totalTimeLength = 0; +//Default values for the x and y axis of the plot +//NOTE: do not remove from the axis AG_options 'labelWidth' and 'labelHeight' because +//this will slow down the animation +var lbl_x_width = 100; +var lbl_x_height = 30; +var zoom_range = [0.1, 20]; + +var AG_defaultXaxis = {zoomRange: zoom_range, labelWidth: lbl_x_width, labelHeight: lbl_x_height}; +var AG_defaultYaxis = {show: false, zoomRange: zoom_range, labelWidth: 200, labelHeight: 30}; + +// the index of the cached file (the file that was loaded asynchronous) +var cachedFileIndex = 0; +var labelX = ""; +var chartTitle = ""; +//The displayed labels for the graph +var chanDisplayLabels = []; +// setup plot +var AG_options = { + series: { + shadowSize: 0, + color: 'blue' + }, // drawing is faster without shadows + lines: { + lineWidth: 1, + show: true + }, + yaxis: AG_defaultYaxis, + xaxis: AG_defaultXaxis, + grid: { + backgroundColor: 'white', + hoverable: true, + clickable: true + }, + points: { + show: false, + radius: 0.001 + }, + zoom: { + interactive: false + }, + selection: { + mode: "xy" + }, + legend: { + show: false + }, + hooks: { + processRawData: [processRawDataHook] + } +}; + +var 
DEFAULT_MAX_CHANNELS = 10; +var plot = null; + +var followingLine = []; +//The required position from which the following vertical time line will start moving with the array +//Expressed as a number from [0, 1], 0 - start from begining, 1 start only at end +var procentualLinePosition = 0.5; +//The actual position in the graph of the following vertical line. Start from -speed to account for the initial translation. +var currentLinePosition = 0; +//The number of points used to display the vertical line. +var numberOfPointsForVerticalLine = 1000; +var isDoubleView = false; + +var AG_homeViewYValues = []; +var AG_homeViewXValues = {zoomRange: zoom_range, labelWidth: lbl_x_width, labelHeight: lbl_x_height}; +//This will be set to true in the launch_viewer method called by burst small previews +var isSmallPreview = false; + +var targetVerticalLinePosition; + +// The base url for calling any methods on a given datatype +var baseDataURLS = []; +var nrOfPagesSet = []; +var dataPageSize = []; +var tsModes = [0, 0, 0]; +var tsStates = [0, 0, 0]; +var longestChannelIndex = 0; + +// region selection component +var AG_regionSelector = null; +// State mode selector. Used as a global only in dual view +var AG_modeSelector = null; + +function resizeToFillParent() { + const canvas = $('#EEGcanvasDiv'); + let container, width, height; + + if (!isSmallPreview) { + // Just use parent section width and height. For width remove some space for the labels to avoid scrolls + // For height we have the toolbar there. Using 100% does not seem to work properly with FLOT. 
+ container = canvas.parent(); + width = container.width() - 40; + height = container.height() - 80; + } else { + container = $('body'); + width = container.width() - 40; + height = container.height() - 20; + } + canvas.width(width).height(height); +} + +window.onresize = function () { + resizeToFillParent(); + redrawPlot(plot.getData()); +}; + +/** + * Animated graph entry point + */ +function AG_startAnimatedChart(ag_settings) { + isSmallPreview = false; + _AG_initGlobals(ag_settings); + _AG_initPaginationState(ag_settings.number_of_visible_points); + _AG_preStart(); + drawSliderForScale(); + drawSliderForAnimationSpeed(); + _AG_init_selection(ag_settings.measurePointsSelectionGIDs); + + bindHoverEvent(); + initializeCanvasEvents(); + if (!ag_settings.extended_view) { + bindZoomEvent(); + } +} + +function AG_startAnimatedChartPreview(ag_settings) { + isSmallPreview = true; + AG_isStopped = true; + _AG_initGlobals(ag_settings); + _AG_initPaginationState(ag_settings.number_of_visible_points); + _AG_preStart(); + + // Initialize AG_submitableSelectedChannels + // warning: Assumes channel values are a range + if (AG_submitableSelectedChannels.length === 0) { + // Viewer breaks if this is empty. Fill the first few channels + const defaultSelectionLength = Math.min(totalNumberOfChannels, DEFAULT_MAX_CHANNELS); + for (let i = 0; i < defaultSelectionLength; i++) { + AG_submitableSelectedChannels.push(i); + } + } + + refreshChannels(); +} + +function AG_rePaginate(number_of_visible_points) { + _AG_initPaginationState(number_of_visible_points); + $('#display-page-size').html('' + number_of_visible_points); + refreshChannels(); + if (isDoubleView) { + initActivityData(); + } +} + +/** + * Initialize global state. Part of the AG startup. 
+ * @private + */ +function _AG_initGlobals(ag_settings) { + isDoubleView = ag_settings.extended_view; + // dataSetUrls = $.parseJSON(dataSetPaths); + baseDataURLS = ag_settings.baseURLS; + nrOfPagesSet = ag_settings.nrOfPages; + dataPageSize = ag_settings.pageSize; + chanDisplayLabels = ag_settings.channelLabels; + noOfChannelsPerSet = ag_settings.channelsPerSet; + timeSetUrls = ag_settings.timeSetPaths; + maxChannelLength = parseInt(ag_settings.pageSize); + AG_normalizationSteps = ag_settings.normalizedSteps; + setMaxDataFileIndex(nrOfPagesSet); + totalNumberOfChannels = ag_settings.noOfChannels; + totalTimeLength = ag_settings.totalLength; + nanValueFound = ag_settings.nan_value_found; + AG_computedStep = ag_settings.translationStep; +} + +/** + * Initialize pagination. Part of AG startup. + * @private + */ +function _AG_initPaginationState(number_of_visible_points) { + AG_numberOfVisiblePoints = parseInt(number_of_visible_points); + if (AG_numberOfVisiblePoints > maxChannelLength) { + AG_numberOfVisiblePoints = maxChannelLength; + } + targetVerticalLinePosition = AG_numberOfVisiblePoints * procentualLinePosition; +} + +/** + * Misc common startup logic. Part of AG startup + * @private + */ +function _AG_preStart() { + resizeToFillParent(); +} + +/** + * Creates a selection component for each time series displayed by this eeg view + * Part of AG startup + * The order of the filterGids determines the order of the selectors + * It must have the same ordering as all other timeseries arrays + * @private + */ +function _AG_init_selection(filterGids) { + let i; + let selectors = []; + + /** + * Returns the selected channel indices as interpreted by AG_submitableSelectedChannels + * ( starting at 0 and ending at len(timeseries_0_channels) + ... 
+ len(timeseries_final_channels) ) + */ + function getSelectedChannelsAsGlobalIndices() { + let all_selected = []; + let offset = 0; + + for (let i = 0; i < selectors.length; i++) { + const selector = selectors[i]; + const selected_in_current = selector.val(); + + for (let j = 0; j < selected_in_current.length; j++) { + all_selected.push(offset + parseInt(selected_in_current[j], 10)); + } + offset += selector._allValues.length; + } + return all_selected; + } + + // init selectors + let selectorId, selector; + + for (i = 0; i < filterGids.length; i++) { + selectorId = "#channelSelector" + i; + selector = TVBUI.regionSelector(selectorId, {filterGid: filterGids[i]}); + selector.change(function (current_selection) { + AG_submitableSelectedChannels = getSelectedChannelsAsGlobalIndices(); + refreshChannels(); + }); + selectors.push(selector); + } + // the first selector is special. we select by default some channels in it and in case of a dual view + // his selection is synchronized with the brain + AG_regionSelector = selectors[0]; + + // Initialize AG_submitableSelectedChannels + AG_submitableSelectedChannels = getSelectedChannelsAsGlobalIndices(); + + if (AG_submitableSelectedChannels.length === 0) { + // Viewer breaks if this is empty. Fill the first few channels + const defaultSelectionLength = Math.min(totalNumberOfChannels, DEFAULT_MAX_CHANNELS); + // we take the values form the dom, a range(defaultSelectionLength) is not a valid selection if there are multiple time series + AG_submitableSelectedChannels = AG_regionSelector._allValues.slice(0, defaultSelectionLength); + AG_regionSelector.val(AG_submitableSelectedChannels); + } + + // Init the mode selection components. 
Assumes that there are part of the selector dom + let modeSelectors = []; + for (i = 0; i < filterGids.length; i++) { + selectorId = "#channelSelector" + i; + selector = TVBUI.modeAndStateSelector(selectorId, i); + selector.modeChanged(_AG_changeMode); + selector.stateVariableChanged(_AG_changeStateVariable); + modeSelectors.push(selector); + } + // The dual view needs to subscribe to this selector; so we save it like AG_regionSelector + AG_modeSelector = modeSelectors[0]; + + refreshChannels(); +} + +/** + * Read speed from the dom + * @param defaultSpeed default speed when there is no speed slider + * @private + */ +function _AG_get_speed(defaultSpeed) { + let speed = defaultSpeed; + if (!isSmallPreview && !isDoubleView) { + speed = $("#ctrl-input-speed").slider("value"); + } + return speed; +} + +/* + * Create FLOT specific options dictionary for the y axis, with correct labels and positioning for + * all channels. Then store these values in 'AG_homeViewYValues' so they can be used in case of a + * 'Home' action in a series of zoom events. 
+ */ +function AG_createYAxisDictionary(nr_channels) { + let ticks, yaxis_dict, increment; + + if (AG_translationStep > 0) { + ticks = []; + const step = AG_computedStep * AG_translationStep; + for (let i = 0; i < nr_channels; i++) { + ticks.push([i * step, chanDisplayLabels[displayedChannels[i]]]); + } + yaxis_dict = { + min: -step, + max: (nr_channels + 1) * step, + ticks: ticks, + zoomRange: [0.1, 20] + }; + increment = nr_channels * step / numberOfPointsForVerticalLine; + if (increment === 0) throw "infinite loop"; + for (let k = -step; k < (nr_channels + 1) * step; k += increment) { + followingLine.push([0, k]); + } + } else { + ticks = [0, 'allChannels']; + yaxis_dict = { + min: -AG_computedStep / 2, + max: AG_computedStep / 2, + ticks: ticks, + zoomRange: [0.1, 20] + }; + increment = AG_computedStep / numberOfPointsForVerticalLine; + if (increment === 0) throw "infinite loop"; + for (let kk = -AG_computedStep / 2; kk < AG_computedStep / 2; kk += increment) { + followingLine.push([0, kk]); + } + } + AG_options.yaxis = yaxis_dict; + AG_homeViewYValues = [yaxis_dict.min, yaxis_dict.max]; + AG_defaultYaxis = yaxis_dict; +} + +function refreshChannels() { + submitSelectedChannels(false); + drawGraph(false, noOfShiftedPoints); +} + +function _AG_changeMode(tsIndex, val) { + tsModes[tsIndex] = parseInt(val); + refreshChannels(); +} + +function _AG_changeStateVariable(tsIndex, val) { + tsStates[tsIndex] = parseInt(val); + refreshChannels(); +} + +function _AG_getSelectedDataAndLongestChannelIndex(data) { + let offset = 0; + let selectedData = []; + let channelLengths = []; + + for (let i = 0; i < data.length; i++) { + const selectedChannels = getDisplayedChannels(data[i], offset); + offset += data[i].length; + if (selectedChannels.length > 0) { + channelLengths.push(selectedChannels[0].length); + } else { + channelLengths.push(-1); + } + selectedData = selectedData.concat(selectedChannels); + } + const longestChannelIndex = 
channelLengths.indexOf(Math.max.apply(Math, channelLengths)); + return {selectedData: selectedData, longestChannelIndex: longestChannelIndex} +} + +/* + * Get required data for the channels in AG_submitableSelectedChannels. If none + * exist then just use the previous 'displayedChannels' (or default in case of first run). + */ +function submitSelectedChannels(isEndOfData) { + + AG_currentIndex = AG_numberOfVisiblePoints; + if (AG_submitableSelectedChannels.length === 0) { + AG_submitableSelectedChannels = displayedChannels.slice(); + } + + if (!(isEndOfData && maxDataFileIndex === 0)) { + AG_allPoints = []; + displayedChannels = AG_submitableSelectedChannels.slice(0); + generateChannelColors(displayedChannels.length); + + let results = []; + for (let i = 0; i < nrOfPagesSet.length; i++) { + const dataURL = readDataPageURL(baseDataURLS[i], 0, dataPageSize, tsStates[i], tsModes[i]); + const data = HLPR_readJSONfromFile(dataURL); + results.push(parseData(data, i)); + } + const r = _AG_getSelectedDataAndLongestChannelIndex(results); + AG_allPoints = AG_allPoints.concat(r.selectedData); + longestChannelIndex = r.longestChannelIndex; + + // keep data only for the selected channels + AG_noOfLines = AG_allPoints.length; + } + + AG_displayedPoints = []; + AG_displayedTimes = []; + for (let ii = 0; ii < AG_noOfLines; ii++) { + AG_displayedPoints.push([]); + } + + if (!(isEndOfData && maxDataFileIndex === 0)) { + //read time + readTimeData(0, false); + AG_time = nextTimeData.slice(0); + } + // reset data + nextData = []; + nextTimeData = []; + AG_isLoadStarted = false; + isNextDataLoaded = false; + isNextTimeDataLoaded = false; + currentDataFileIndex = 0; + totalPassedData = 0; + currentLinePosition = 0; + if (nanValueFound) { + displayMessage('The given data contains some NaN values. 
All the NaN values were replaced by zero.', 'warningMessage'); + } + + // draw the first 'AG_numberOfVisiblePoints' points + redrawCurrentView(); + if (!isSmallPreview) { + AG_translationStep = $('#ctrl-input-spacing').slider("option", "value") / 4; + AG_scaling = $("#ctrl-input-scale").slider("value"); + } else { + AG_translationStep = 1; + } + + AG_createYAxisDictionary(AG_noOfLines); + redrawPlot([]); + resetToDefaultView(); + if (AG_isStopped) { + AG_isStopped = false; + drawGraph(false, noOfShiftedPoints); + AG_isStopped = true; + } else { + drawGraph(false, noOfShiftedPoints); + } +} + +/** + * This method decides if we are at the beginning or end of the graph, in which case we only need + * to move the vertical line, or in between, where vertical line is not moving, instead arrays are shifted. + */ +function shouldMoveLine(direction, shiftNo) { + shiftNo = shiftNo || 1; + let isEndOfGraph = false; + let isStartOfGraph = false; + if (direction === 1) { + isEndOfGraph = ((totalPassedData + AG_currentIndex + noOfShiftedPoints >= totalTimeLength) && (currentLinePosition < AG_numberOfVisiblePoints + shiftNo)); + isStartOfGraph = (currentLinePosition < targetVerticalLinePosition); + if (AG_displayedTimes[currentLinePosition] > AG_displayedPoints[longestChannelIndex][AG_displayedPoints[longestChannelIndex].length - 1][0]) { + isEndOfGraph = false; + } + } else { + isEndOfGraph = (currentLinePosition > targetVerticalLinePosition); + isStartOfGraph = ((totalPassedData + AG_currentIndex - noOfShiftedPoints < AG_numberOfVisiblePoints) && (currentLinePosition > 0)); + if (AG_displayedTimes[currentLinePosition] <= 0) { + isStartOfGraph = false; + } + } + + return isStartOfGraph || isEndOfGraph; +} + +var isEndOfData = false; +var AG_channelColorsDict = {}; +var AG_reversedChannelColorsDict = {}; + +/* + * Generate different colors for each channel. 
+ */ +function generateChannelColors(nr_of_channels) { + AG_channelColorsDict = {}; + AG_reversedChannelColorsDict = {}; + let step = parseInt(255 / nr_of_channels); + for (let i = 0; i < nr_of_channels; i++) { + const color = "rgb(" + 250 * (i % 2) + "," + (200 - i * step) + "," + 220 * ((i + 1) % 2) + ")"; + AG_channelColorsDict[color] = i; + AG_reversedChannelColorsDict[i] = color; + } +} + +/* + * Get y-axis labels and update colors to correspond to each channel + */ +function setLabelColors() { + const labels = $('.flot-y-axis .tickLabel'); + for (let i = 0; i < labels.length; i++) { + const chan_idx = chanDisplayLabels.indexOf(labels[i].firstChild.textContent); + if (chan_idx >= 0) { + labels[i].style.color = AG_reversedChannelColorsDict[displayedChannels.indexOf(chan_idx)]; + labels[i].style.left = 80 + (i % 2) * 40 + 'px'; + } + } +} + +/* + * This method draw the actual plot. The 'executeShift' parameter decides if a shift is + * to be done, or just use the previous data points. 'shiftNo' decides the number of points + * that will be shifted. 
+ */ +function drawGraph(executeShift, shiftNo) { + let i; + noOfShiftedPoints = shiftNo; + if (isEndOfData) { + isEndOfData = false; + submitSelectedChannels(true); + } + if (t !== null && t !== undefined) { + clearTimeout(t); + } + if (AG_isStopped) { + return; + } + if (shouldLoadNextDataFile()) { + loadNextDataFile(); + } + + let direction = 1; + if (_AG_get_speed(1) < 0) { + direction = -1; + } + + let moveLine = shouldMoveLine(direction, noOfShiftedPoints); + //Increment line position in case we need to move the line + if (moveLine && executeShift && !AG_isSpeedZero) { + currentLinePosition = currentLinePosition + noOfShiftedPoints * direction; + } + + if (currentLinePosition >= AG_numberOfVisiblePoints) { + isEndOfData = true; + } + + if (executeShift && !AG_isSpeedZero && !moveLine) { + let count = 0; + if (direction === -1) { + if (currentDataFileIndex > 0 || AG_currentIndex > AG_numberOfVisiblePoints) { + count = 0; + while (count < noOfShiftedPoints && AG_currentIndex - count > AG_numberOfVisiblePoints) { + count = count + 1; + AG_displayedTimes.unshift(AG_time[AG_currentIndex - AG_numberOfVisiblePoints - count]); + for (i = 0; i < AG_displayedPoints.length; i++) { + AG_displayedPoints[i].unshift( + [AG_time[AG_currentIndex - AG_numberOfVisiblePoints - count], + AG_addTranslationStep(AG_allPoints[i][AG_currentIndex - AG_numberOfVisiblePoints - count], i) + ]); + AG_displayedPoints[i].pop(); + } + AG_displayedTimes.pop(); + } + + if (AG_currentIndex - count > AG_numberOfVisiblePoints) { + AG_currentIndex = AG_currentIndex - count; + } else { + AG_currentIndex = Math.min(AG_currentIndex, AG_numberOfVisiblePoints); + if (currentDataFileIndex > 0 && isNextDataLoaded) { + changeCurrentDataFile(); + } + } + } + } else { + if (totalTimeLength > AG_currentIndex + totalPassedData) { + // here we add new 'noOfShiftedPoints' points to the chart and remove the first 'noOfShiftedPoints' visible points + count = 0; + while (count < noOfShiftedPoints && totalTimeLength 
> AG_currentIndex + count) { + AG_displayedTimes.push(AG_time[AG_currentIndex + count]); + for (i = 0; i < AG_displayedPoints.length; i++) { + AG_displayedPoints[i].push( + [AG_time[AG_currentIndex + count], + AG_addTranslationStep(AG_allPoints[i][AG_currentIndex + count], i) + ]); + AG_displayedPoints[i].shift(); + } + AG_displayedTimes.shift(); + count = count + 1; + } + + if (AG_currentIndex + count < AG_allPoints[longestChannelIndex].length) { + AG_currentIndex = AG_currentIndex + count; + } else { + AG_currentIndex = Math.max(AG_currentIndex, AG_allPoints[longestChannelIndex].length); + if (maxDataFileIndex > 0 && isNextDataLoaded) { + changeCurrentDataFile(); + } + } + } + } + } + if (!AG_isSpeedZero) { + for (i = 0; i < followingLine.length; i++) { + followingLine[i][0] = AG_displayedTimes[currentLinePosition]; + } + let preparedData = []; + for (let j = 0; j < AG_displayedPoints.length; j++) { + preparedData.push({data: AG_displayedPoints[j].slice(0), color: AG_reversedChannelColorsDict[j]}); + } + preparedData.push({data: followingLine, color: 'rgb(255, 0, 0)'}); + plot.setData(preparedData); + plot.setupGrid(); + plot.draw(); + setLabelColors(); + } + if (!isDoubleView) { + t = setTimeout("drawGraph(true, noOfShiftedPoints)", getTimeoutBasedOnSpeed()); + } +} + +/* + * Do a redraw of the plot. Be sure to keep the resizable margin elements as the plot method seems to destroy them. + */ +function redrawPlot(data) { + const target = $('#EEGcanvasDiv'); + const resizerChildren = target.children('.ui-resizable-handle'); + for (let i = 0; i < resizerChildren.length; i++) { + target[0].removeChild(resizerChildren[i]); + } + plot = $.plot(target, data, $.extend(true, {}, AG_options)); + for (let j = 0; j < resizerChildren.length; j++) { + target[0].appendChild(resizerChildren[j]); + } + setLabelColors(); +} + + +/** + * This hook will be called before Flot copies and normalizes the raw data for the given + * series. 
If the function fills in datapoints.points with normalized + * points and sets datapoints.pointsize to the size of the points, + * Flot will skip the copying/normalization step for this series. + */ +function processRawDataHook(plot, series, data, datapoints) { + datapoints.format = [ + {x: true, number: true, required: true}, + {y: true, number: true, required: true} + ]; + datapoints.pointsize = 2; + + for (let i = 0; i < data.length; i++) { + datapoints.points.push(data[i][0]); + datapoints.points.push(data[i][1]); + } + + series.xaxis.used = series.yaxis.used = true; +} + + +/** + * Translate the given value. + * We use this method to translate the values for the drawn line charts because we don't want them to overlap. + * + * @param value the value that should be translated. + * @param index the number of AG_translationSteps that should be used for translating the given value. + * @return {number} + */ +function AG_addTranslationStep(value, index) { + return value * AG_scaling - AG_normalizationSteps[displayedChannels[index]] + AG_translationStep * AG_computedStep * index; +} + +function getTimeoutBasedOnSpeed() { + const currentAnimationSpeedValue = _AG_get_speed(40); + if (currentAnimationSpeedValue === 0) { + return 300; + } + const timeout = 10 - Math.abs(currentAnimationSpeedValue); + if (timeout === 9) { + return 3000; + } + if (timeout === 8) { + return 2000; + } + if (timeout === 7) { + return 1000; + } + return timeout * 100 + 25; +} + +/* + * Load the data from a given step and center plot around that step. 
+ */ +function loadEEGChartFromTimeStep(step) { + // Read all data for the page in which the selected step falls into + const chunkForStep = Math.floor(step / dataPageSize); + const dataUrl = readDataPageURL(baseDataURLS[0], chunkForStep * dataPageSize, (chunkForStep + 1) * dataPageSize, tsStates[0], tsModes[0]); + const dataPage = [parseData(HLPR_readJSONfromFile(dataUrl), 0)]; + AG_allPoints = getDisplayedChannels(dataPage[0], 0).slice(0); + AG_time = HLPR_readJSONfromFile(timeSetUrls[0][chunkForStep]).slice(0); + + totalPassedData = chunkForStep * dataPageSize; // New passed data will be all data until the start of this page + currentDataFileIndex = chunkForStep; + AG_displayedPoints = []; + const indexInPage = step % dataPageSize; // This is the index in the current page that step will have + let fromIdx, toIdx; + currentLinePosition = AG_numberOfVisiblePoints / 2; // Assume we are not end or beginning since that will be most of the times + if (indexInPage <= AG_numberOfVisiblePoints / 2) { + if (chunkForStep === 0) { + // We are at the beginning of the graph, line did not reach middle point yet, and we are still displaying the first + // AG_numberOfVisiblePoints values + AG_currentIndex = AG_numberOfVisiblePoints; + currentLinePosition = indexInPage; + prepareDisplayData(0, AG_numberOfVisiblePoints, AG_allPoints, AG_time); + } else { + // We are at an edge case between pages. So in order to have all the + // AG_numberOfVisiblePoints we need to also load the points from before this page + addFromPreviousPage(indexInPage, chunkForStep); + } + } else { + if ((indexInPage >= pageSize - AG_numberOfVisiblePoints / 2) || (nrOfPagesSet[0] === 1 && indexInPage + AG_numberOfVisiblePoints / 2 > AG_time.length)) { + if (chunkForStep >= nrOfPagesSet[0] - 1) { + // We are at the end of the graph. The line is starting to move further right from the middle position. 
We are just + // displaying the last AG_numberOfVisiblePoints from the last page + if (AG_time.length > AG_numberOfVisiblePoints) { + fromIdx = AG_time.length - 1 - AG_numberOfVisiblePoints; + } else { + fromIdx = 0; + } + toIdx = AG_time.length - 1; + AG_currentIndex = toIdx; + currentLinePosition = AG_numberOfVisiblePoints - (AG_time.length - 1 - indexInPage); + prepareDisplayData(fromIdx, toIdx, AG_allPoints, AG_time); + } else { + // We are at an edge case between pages. So in order to have all the + // AG_numberOfVisiblePoints we need to also load the points from after this page + addFromNextPage(indexInPage, chunkForStep); + } + } else { + // We are somewhere in the middle of the graph. + fromIdx = indexInPage - AG_numberOfVisiblePoints / 2; + toIdx = indexInPage + AG_numberOfVisiblePoints / 2; + AG_currentIndex = toIdx; + prepareDisplayData(fromIdx, toIdx, AG_allPoints, AG_time); + } + } + nextData = []; + AG_isLoadStarted = false; + isNextDataLoaded = false; + isNextTimeDataLoaded = false; +} + +/* + * Add all required data to AG_displayedPoints and AG_displayedTimes in order to center + * around indexInPage, if some of the required data is on the previous page. + */ +function addFromPreviousPage(indexInPage, currentPage) { + + const previousPageUrl = readDataPageURL(baseDataURLS[0], (currentPage - 1) * dataPageSize, currentPage * dataPageSize, tsStates[0], tsModes[0]); + let previousData = parseData(HLPR_readJSONfromFile(previousPageUrl), 0); + previousData = getDisplayedChannels(previousData, 0).slice(0); + const previousTimeData = HLPR_readJSONfromFile(timeSetUrls[0][currentPage - 1]); + // Compute which slices we would need from the 'full' two-pages data. 
+ // We only need the difference so to center indexInPage at AG_numberOfVisiblePoints / 2 + let fromIdx, toIdx; + fromIdx = previousData[0].length - (AG_numberOfVisiblePoints / 2 - indexInPage); // This is from where we need to read from previous data + AG_currentIndex = toIdx = AG_numberOfVisiblePoints - (AG_numberOfVisiblePoints / 2 - indexInPage); // This is where we need to add from the current page + // Just generate displayed point and displayed times now + for (let idx = 0; idx < previousData.length; idx++) { + let idy; + let oneLine = []; + // Push data that is from previos slice + for (idy = fromIdx; idy < previousData[0].length; idy++) { + oneLine.push([previousTimeData[idy], AG_addTranslationStep(previousData[idx][idy], idx)]); + } + // Now that that is from our current slice + for (idy = 0; idy < toIdx; idy++) { + oneLine.push([AG_time[idy], AG_addTranslationStep(AG_allPoints[idx][idy], idx)]); + } + AG_displayedPoints.push(oneLine); + } + AG_displayedTimes = previousTimeData.slice(fromIdx).concat(AG_time.slice(0, toIdx)); + previousData = null; +} + +/* + * Add all required data to AG_displayedPoints and AG_displayedTimes in order to center + * around indexInPage, if some of the required data is on the next page. 
+ */ +function addFromNextPage(indexInPage, currentPage) { + + const followingPageUrl = readDataPageURL(baseDataURLS[0], (currentPage + 1) * dataPageSize, (currentPage + 2) * dataPageSize, tsStates[0], tsModes[0]); + let followingData = parseData(HLPR_readJSONfromFile(followingPageUrl), 0); + followingData = getDisplayedChannels(followingData, 0).slice(0); + const followingTimeData = HLPR_readJSONfromFile(timeSetUrls[0][currentPage + 1]); + let fromIdx, toIdx; + fromIdx = indexInPage - (AG_numberOfVisiblePoints / 2); // We need to read starting from here from the current page + AG_currentIndex = toIdx = fromIdx + AG_numberOfVisiblePoints - AG_allPoints[0].length; // We need to read up to here from next page + for (let idx = 0; idx < AG_allPoints.length; idx++) { + let idy; + const oneLine = []; + // Push data that is from this slice + for (idy = fromIdx; idy < AG_allPoints[0].length; idy++) { + oneLine.push([AG_time[idy], AG_addTranslationStep(AG_allPoints[idx][idy], idx)]); + } + // Now that that is from next slice + for (idy = 0; idy < toIdx; idy++) { + oneLine.push([followingTimeData[idy], AG_addTranslationStep(followingData[idx][idy], idx)]); + } + AG_displayedPoints.push(oneLine); + } + AG_displayedTimes = AG_time.slice(fromIdx).concat(followingTimeData.slice(0, toIdx)); + // Since next page is already loaded, that becomes the current page + AG_allPoints = followingData; + AG_time = followingTimeData; + totalPassedData = (currentPage + 1) * dataPageSize; + currentDataFileIndex = currentPage + 1; + isNextDataLoaded = true; + isNextTimeDataLoaded = true; +} + +/* + * Just re-populate whole displayedPoints and displayedTimes given a start and end index. 
+ */ +function prepareDisplayData(fromIdx, toIdx, pointsArray, timeArray) { + + for (let idx = 0; idx < pointsArray.length; idx++) { + let oneLine = []; + for (let idy = fromIdx; idy < toIdx; idy++) { + oneLine.push([timeArray[idy], AG_addTranslationStep(pointsArray[idx][idy], idx)]); + } + AG_displayedPoints.push(oneLine); + } + AG_displayedTimes = timeArray.slice(fromIdx, toIdx) +} + +/* + * Read the next data file asyncronously. Also get the corresponding time data file. + */ +function loadNextDataFile() { + AG_isLoadStarted = true; + const nx_idx = getNextDataFileIndex(); + cachedFileIndex = nx_idx; + AG_readFileDataAsynchronous(nrOfPagesSet, noOfChannelsPerSet, nx_idx, maxChannelLength, 0); + readTimeData(nx_idx, true); +} + +function changeCurrentDataFile() { + if (!isNextDataLoaded || !isNextTimeDataLoaded) { + return; + } + + if (cachedFileIndex !== getNextDataFileIndex()) { + AG_isLoadStarted = false; + isNextDataLoaded = false; + isNextTimeDataLoaded = false; + nextData = []; + nextTimeData = []; + return; + } + + const speed = _AG_get_speed(100); + const longestChannelLength = AG_allPoints[longestChannelIndex].length; + + if (speed > 0) { + totalPassedData = totalPassedData + longestChannelLength; + if (longestChannelLength < AG_currentIndex) { + AG_currentIndex = -(longestChannelLength - AG_currentIndex); + } else { + AG_currentIndex = 0; + } + } else if (speed < 0) { + totalPassedData = totalPassedData - longestChannelLength; + if (totalPassedData < 0) { + totalPassedData = 0; + } + } else { + return; + } + + AG_allPoints = nextData.slice(0); + nextData = []; + AG_time = nextTimeData.slice(0); + nextTimeData = []; + currentDataFileIndex = getNextDataFileIndex(); + AG_isLoadStarted = false; + isNextDataLoaded = false; + isNextTimeDataLoaded = false; + + if (speed < 0) { + AG_currentIndex = longestChannelLength + AG_currentIndex; + } +} + +function shouldLoadNextDataFile() { + if (!AG_isLoadStarted && maxDataFileIndex > 0) { + const nextFileIndex = 
getNextDataFileIndex(); + const speed = _AG_get_speed(1); // Assume left to right pass of data + if (currentDataFileIndex !== nextFileIndex) { + if ((speed > 0) && (maxChannelLength - AG_currentIndex < threshold * AG_numberOfVisiblePoints)) { + return true; + } + if ((speed < 0) && (AG_currentIndex - AG_numberOfVisiblePoints < threshold * AG_numberOfVisiblePoints)) { + return true; + } + } + } + return false; +} + +/* + * In case of multiple arrays find out which has the most data files that need + * to be loaded. + */ +function setMaxDataFileIndex(nrOfPagesPerArray) { + let max_ln = 0; + for (let i = 0; i < nrOfPagesPerArray.length; i++) { + if (nrOfPagesPerArray[i] > max_ln) { + max_ln = nrOfPagesPerArray[i]; + } + } + maxDataFileIndex = max_ln - 1; +} + +/* + * Return the index of the next data file that should be loaded. + */ +function getNextDataFileIndex() { + let nextIndex; + const speed = _AG_get_speed(100); + if (speed > 0) { + nextIndex = currentDataFileIndex + 1; + if (nextIndex >= maxDataFileIndex) { + return maxDataFileIndex; + } + } else { + nextIndex = currentDataFileIndex - 1; + if (nextIndex <= 0) { + return 0; + } + } + return nextIndex; +} + +function AG_readFileDataAsynchronous(nrOfPages, noOfChannelsPerSet, currentFileIndex, maxChannelLength, dataSetIndex) { + if (dataSetIndex >= nrOfPages.length) { + isNextDataLoaded = true; + // keep data only for the selected channels + const r = _AG_getSelectedDataAndLongestChannelIndex(nextData); + longestChannelIndex = r.longestChannelIndex; + nextData = r.selectedData; //todo: occasional shape mismatch 3d <- 2d + return; + } + if (nrOfPages[dataSetIndex] - 1 < currentFileIndex && AG_isLoadStarted) { + // todo: assumed that this is computing a padding for smaller signals. 
check if this is really the purpose of this + let j; + let padding = []; + let oneChannel = []; + for (j = 0; j < maxChannelLength; j++) { + oneChannel.push(0); + } + for (j = 0; j < noOfChannelsPerSet[dataSetIndex]; j++) { + padding.push(oneChannel); + } + nextData.push(padding); + + AG_readFileDataAsynchronous(nrOfPages, noOfChannelsPerSet, currentFileIndex, maxChannelLength, dataSetIndex + 1); + } else { + doAjaxCall({ + url: readDataPageURL(baseDataURLS[dataSetIndex], currentFileIndex * dataPageSize, (currentFileIndex + 1) * dataPageSize, tsStates[dataSetIndex], tsModes[dataSetIndex]), + success: function (data) { + if (AG_isLoadStarted) { + data = $.parseJSON(data); + const result = parseData(data, dataSetIndex); + nextData.push(result); + + AG_readFileDataAsynchronous(nrOfPages, noOfChannelsPerSet, currentFileIndex, maxChannelLength, dataSetIndex + 1); + } + } + }); + } +} + +/* + * Data is received from the HLPR_parseJSON as a 500/74 array. We need to transform it + * into an 74/500 one and in the transformation also replace all NaN values. 
+ */ +function parseData(dataArray, dataSetIndex) { + + let result = []; + for (let i = 0; i < noOfChannelsPerSet[dataSetIndex]; i++) { + result.push([]); + } + for (let j = 0; j < dataArray.length; j++) { + for (let k = 0; k < noOfChannelsPerSet[dataSetIndex]; k++) { + let arrElem = dataArray[j][k]; + if (arrElem === 'NaN') { + nanValueFound = true; + arrElem = 0; + } + result[k].push(arrElem); + } + } + return result; +} + +/** + * + * @param fileIndex + * @param asyncRead true only if the file should be read asynchronous + */ +function readTimeData(fileIndex, asyncRead) { + if (timeSetUrls[longestChannelIndex].length <= fileIndex) { + nextTimeData = []; + for (let i = 0; i < maxChannelLength; i++) { + nextTimeData.push(totalPassedData + i); + } + isNextTimeDataLoaded = true; + } else { + if (asyncRead) { + doAjaxCall({ + url: timeSetUrls[longestChannelIndex][fileIndex], + success: function (data) { + nextTimeData = $.parseJSON(data); + isNextTimeDataLoaded = true; + } + }); + } else { + nextTimeData = HLPR_readJSONfromFile(timeSetUrls[longestChannelIndex][fileIndex]); + isNextTimeDataLoaded = true; + } + } +} + +function getArrayFromDataFile(dataFile) { + let fileData = dataFile.replace(/\n/g, " ").replace(/\t/g, " "); + let arrayData = $.trim(fileData).split(" "); + for (let i = 0; i < arrayData.length; i++) { + arrayData[i] = parseFloat(arrayData[i]); + } + return arrayData; +} + +function getDisplayedChannels(listOfAllChannels, offset) { + let selectedData = []; + for (let i = 0; i < displayedChannels.length; i++) { + if (listOfAllChannels[displayedChannels[i] - offset] !== undefined) { + selectedData.push(listOfAllChannels[displayedChannels[i] - offset].slice(0)); + } + } + return selectedData; +} From 3dd5c31471491a5e44738f1021045d75bdca9a6d Mon Sep 17 00:00:00 2001 From: kimonoki Date: Wed, 1 Aug 2018 14:53:58 +1000 Subject: [PATCH 30/53] TVB-2379 Rename animated_graph.js to dualBrainViewer.js --- .../new_dual_brain/scripts/animated_graph.js | 1129 
----------------- 1 file changed, 1129 deletions(-) delete mode 100644 tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/animated_graph.js diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/animated_graph.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/animated_graph.js deleted file mode 100644 index 50cf15b12..000000000 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/animated_graph.js +++ /dev/null @@ -1,1129 +0,0 @@ -/** - * TheVirtualBrain-Framework Package. This package holds all Data Management, and - * Web-UI helpful to run brain-simulations. To use it, you also need do download - * TheVirtualBrain-Scientific Package (for simulators). See content of the - * documentation-folder for more details. See also http://www.thevirtualbrain.org - * - * (c) 2012-2017, Baycrest Centre for Geriatric Care ("Baycrest") and others - * - * This program is free software: you can redistribute it and/or modify it under the - * terms of the GNU General Public License as published by the Free Software Foundation, - * either version 3 of the License, or (at your option) any later version. - * This program is distributed in the hope that it will be useful, but WITHOUT ANY - * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A - * PARTICULAR PURPOSE. See the GNU General Public License for more details. - * You should have received a copy of the GNU General Public License along with this - * program. If not, see . 
- * - **/ - -/* globals doAjaxCall, readDataPageURL, HLPR_readJSONfromFile */ - -// //it contains all the points that have to be/have been displayed (it contains all the points from the read file); -// //it is an array of arrays (each array contains the points for a certain line chart) -var AG_allPoints = []; -// it supplies the labels for x axis (time in milliseconds) -var AG_time = []; -//it is used for clearing timing events (the event that calls the drawGraph method after a specified time-interval) -var t = null; -//how many elements will be visible on the screen -//computed on the server -var AG_numberOfVisiblePoints = 0; -//all the points that are visible on the screen at a certain moment; the points are read from the AG_allPoints array -//and are translated with a value equal to [AG_translationStep * (AG_noOfLines - the index of the current line)] -//THE FORM of this matrix is: [ [[t1, a1], [t2, a2], ...], [[t1, b1], [t2, b2], ...], ..., [[t1, n1], [t2, n2], ...]] -// t1, t2, ... - represents time that is visible on the screen at a certain moment; -// a1, a2,... - represents the translated values -var AG_displayedPoints = []; -//All the times values that are displayed at a certain moment. To be used by the vertical time line. -var AG_displayedTimes = []; -//the last element that was displayed on the screen is located at this index; the index refers to AG_allPoints array -var AG_currentIndex = 0; -//this var should be set to the length of the AG_allPoints array -var AG_noOfLines = 0; -// the step used for translating the drawn line charts; we translate the drawn line charts because we don't want them to overlap -// the lines will be translated with AG_translationStep * AG_computedStep -var AG_translationStep = 1; -// a scaling factor for the displayed signal -var AG_scaling = 1; -// this var is computed on the server. It is used for line translation (AG_translationStep * AG_computedStep). 
-var AG_computedStep = 50; -//The normalization steps for each of the channels, in order to bring them centered near the channel bar -var AG_normalizationSteps = []; -//If the animation is paused using pause/start button -var AG_isStopped = false; -//If animation speed is set at a 0 value -var AG_isSpeedZero = false; -//the number of points that are shifted/unshift at a moment -var noOfShiftedPoints = 1; -// List of channels that will be submited on a change of the displayed channels -var AG_submitableSelectedChannels = []; -// contains the indexes of the channels that are displayed -var displayedChannels = []; -// a list of urls pointing to the files from where we should read the time -var timeSetUrls = []; -//a list containing the number of channel in each file specified in 'dataSetUrls' fields -var noOfChannelsPerSet = []; -// the number of points from the longest channel -var maxChannelLength = 0; -// the maximum number of data files from all the submited datatypes -var maxDataFileIndex = 0; -// represents the file index from the dataset that is displayed in the chart -var currentDataFileIndex = 0; -// contains the parsed data for the next file from the dataset -var nextData = []; -// contains the time for the next file from the dataset -var nextTimeData = []; -// true only if the next file from dataset was loaded into memory -var isNextDataLoaded = false; -// true only if the next time data was loaded into memory -var isNextTimeDataLoaded = false; -// true only if the the process of loading a file is started -var AG_isLoadStarted = false; -// this is the number of steps left before updating the next file -var threshold = 10; -// the amount of data that has passed -var totalPassedData = 0; -// the number of channels -var totalNumberOfChannels = 0; -// true only if any of the displayed channels contains NaN values -var nanValueFound = false; -//Channel prefix for each array of data -var channelPrefix = "Channel: "; -// -var totalTimeLength = 0; -//Default values 
for the x and y axis of the plot -//NOTE: do not remove from the axis AG_options 'labelWidth' and 'labelHeight' because -//this will slow down the animation -var lbl_x_width = 100; -var lbl_x_height = 30; -var zoom_range = [0.1, 20]; - -var AG_defaultXaxis = {zoomRange: zoom_range, labelWidth: lbl_x_width, labelHeight: lbl_x_height}; -var AG_defaultYaxis = {show: false, zoomRange: zoom_range, labelWidth: 200, labelHeight: 30}; - -// the index of the cached file (the file that was loaded asynchronous) -var cachedFileIndex = 0; -var labelX = ""; -var chartTitle = ""; -//The displayed labels for the graph -var chanDisplayLabels = []; -// setup plot -var AG_options = { - series: { - shadowSize: 0, - color: 'blue' - }, // drawing is faster without shadows - lines: { - lineWidth: 1, - show: true - }, - yaxis: AG_defaultYaxis, - xaxis: AG_defaultXaxis, - grid: { - backgroundColor: 'white', - hoverable: true, - clickable: true - }, - points: { - show: false, - radius: 0.001 - }, - zoom: { - interactive: false - }, - selection: { - mode: "xy" - }, - legend: { - show: false - }, - hooks: { - processRawData: [processRawDataHook] - } -}; - -var DEFAULT_MAX_CHANNELS = 10; -var plot = null; - -var followingLine = []; -//The required position from which the following vertical time line will start moving with the array -//Expressed as a number from [0, 1], 0 - start from begining, 1 start only at end -var procentualLinePosition = 0.5; -//The actual position in the graph of the following vertical line. Start from -speed to account for the initial translation. -var currentLinePosition = 0; -//The number of points used to display the vertical line. 
-var numberOfPointsForVerticalLine = 1000; -var isDoubleView = false; - -var AG_homeViewYValues = []; -var AG_homeViewXValues = {zoomRange: zoom_range, labelWidth: lbl_x_width, labelHeight: lbl_x_height}; -//This will be set to true in the launch_viewer method called by burst small previews -var isSmallPreview = false; - -var targetVerticalLinePosition; - -// The base url for calling any methods on a given datatype -var baseDataURLS = []; -var nrOfPagesSet = []; -var dataPageSize = []; -var tsModes = [0, 0, 0]; -var tsStates = [0, 0, 0]; -var longestChannelIndex = 0; - -// region selection component -var AG_regionSelector = null; -// State mode selector. Used as a global only in dual view -var AG_modeSelector = null; - -function resizeToFillParent() { - const canvas = $('#EEGcanvasDiv'); - let container, width, height; - - if (!isSmallPreview) { - // Just use parent section width and height. For width remove some space for the labels to avoid scrolls - // For height we have the toolbar there. Using 100% does not seem to work properly with FLOT. 
- container = canvas.parent(); - width = container.width() - 40; - height = container.height() - 80; - } else { - container = $('body'); - width = container.width() - 40; - height = container.height() - 20; - } - canvas.width(width).height(height); -} - -window.onresize = function () { - resizeToFillParent(); - redrawPlot(plot.getData()); -}; - -/** - * Animated graph entry point - */ -function AG_startAnimatedChart(ag_settings) { - isSmallPreview = false; - _AG_initGlobals(ag_settings); - _AG_initPaginationState(ag_settings.number_of_visible_points); - _AG_preStart(); - drawSliderForScale(); - drawSliderForAnimationSpeed(); - _AG_init_selection(ag_settings.measurePointsSelectionGIDs); - - bindHoverEvent(); - initializeCanvasEvents(); - if (!ag_settings.extended_view) { - bindZoomEvent(); - } -} - -function AG_startAnimatedChartPreview(ag_settings) { - isSmallPreview = true; - AG_isStopped = true; - _AG_initGlobals(ag_settings); - _AG_initPaginationState(ag_settings.number_of_visible_points); - _AG_preStart(); - - // Initialize AG_submitableSelectedChannels - // warning: Assumes channel values are a range - if (AG_submitableSelectedChannels.length === 0) { - // Viewer breaks if this is empty. Fill the first few channels - const defaultSelectionLength = Math.min(totalNumberOfChannels, DEFAULT_MAX_CHANNELS); - for (let i = 0; i < defaultSelectionLength; i++) { - AG_submitableSelectedChannels.push(i); - } - } - - refreshChannels(); -} - -function AG_rePaginate(number_of_visible_points) { - _AG_initPaginationState(number_of_visible_points); - $('#display-page-size').html('' + number_of_visible_points); - refreshChannels(); - if (isDoubleView) { - initActivityData(); - } -} - -/** - * Initialize global state. Part of the AG startup. 
- * @private - */ -function _AG_initGlobals(ag_settings) { - isDoubleView = ag_settings.extended_view; - // dataSetUrls = $.parseJSON(dataSetPaths); - baseDataURLS = ag_settings.baseURLS; - nrOfPagesSet = ag_settings.nrOfPages; - dataPageSize = ag_settings.pageSize; - chanDisplayLabels = ag_settings.channelLabels; - noOfChannelsPerSet = ag_settings.channelsPerSet; - timeSetUrls = ag_settings.timeSetPaths; - maxChannelLength = parseInt(ag_settings.pageSize); - AG_normalizationSteps = ag_settings.normalizedSteps; - setMaxDataFileIndex(nrOfPagesSet); - totalNumberOfChannels = ag_settings.noOfChannels; - totalTimeLength = ag_settings.totalLength; - nanValueFound = ag_settings.nan_value_found; - AG_computedStep = ag_settings.translationStep; -} - -/** - * Initialize pagination. Part of AG startup. - * @private - */ -function _AG_initPaginationState(number_of_visible_points) { - AG_numberOfVisiblePoints = parseInt(number_of_visible_points); - if (AG_numberOfVisiblePoints > maxChannelLength) { - AG_numberOfVisiblePoints = maxChannelLength; - } - targetVerticalLinePosition = AG_numberOfVisiblePoints * procentualLinePosition; -} - -/** - * Misc common startup logic. Part of AG startup - * @private - */ -function _AG_preStart() { - resizeToFillParent(); -} - -/** - * Creates a selection component for each time series displayed by this eeg view - * Part of AG startup - * The order of the filterGids determines the order of the selectors - * It must have the same ordering as all other timeseries arrays - * @private - */ -function _AG_init_selection(filterGids) { - let i; - let selectors = []; - - /** - * Returns the selected channel indices as interpreted by AG_submitableSelectedChannels - * ( starting at 0 and ending at len(timeseries_0_channels) + ... 
+ len(timeseries_final_channels) ) - */ - function getSelectedChannelsAsGlobalIndices() { - let all_selected = []; - let offset = 0; - - for (let i = 0; i < selectors.length; i++) { - const selector = selectors[i]; - const selected_in_current = selector.val(); - - for (let j = 0; j < selected_in_current.length; j++) { - all_selected.push(offset + parseInt(selected_in_current[j], 10)); - } - offset += selector._allValues.length; - } - return all_selected; - } - - // init selectors - let selectorId, selector; - - for (i = 0; i < filterGids.length; i++) { - selectorId = "#channelSelector" + i; - selector = TVBUI.regionSelector(selectorId, {filterGid: filterGids[i]}); - selector.change(function (current_selection) { - AG_submitableSelectedChannels = getSelectedChannelsAsGlobalIndices(); - refreshChannels(); - }); - selectors.push(selector); - } - // the first selector is special. we select by default some channels in it and in case of a dual view - // his selection is synchronized with the brain - AG_regionSelector = selectors[0]; - - // Initialize AG_submitableSelectedChannels - AG_submitableSelectedChannels = getSelectedChannelsAsGlobalIndices(); - - if (AG_submitableSelectedChannels.length === 0) { - // Viewer breaks if this is empty. Fill the first few channels - const defaultSelectionLength = Math.min(totalNumberOfChannels, DEFAULT_MAX_CHANNELS); - // we take the values form the dom, a range(defaultSelectionLength) is not a valid selection if there are multiple time series - AG_submitableSelectedChannels = AG_regionSelector._allValues.slice(0, defaultSelectionLength); - AG_regionSelector.val(AG_submitableSelectedChannels); - } - - // Init the mode selection components. 
Assumes that there are part of the selector dom - let modeSelectors = []; - for (i = 0; i < filterGids.length; i++) { - selectorId = "#channelSelector" + i; - selector = TVBUI.modeAndStateSelector(selectorId, i); - selector.modeChanged(_AG_changeMode); - selector.stateVariableChanged(_AG_changeStateVariable); - modeSelectors.push(selector); - } - // The dual view needs to subscribe to this selector; so we save it like AG_regionSelector - AG_modeSelector = modeSelectors[0]; - - refreshChannels(); -} - -/** - * Read speed from the dom - * @param defaultSpeed default speed when there is no speed slider - * @private - */ -function _AG_get_speed(defaultSpeed) { - let speed = defaultSpeed; - if (!isSmallPreview && !isDoubleView) { - speed = $("#ctrl-input-speed").slider("value"); - } - return speed; -} - -/* - * Create FLOT specific options dictionary for the y axis, with correct labels and positioning for - * all channels. Then store these values in 'AG_homeViewYValues' so they can be used in case of a - * 'Home' action in a series of zoom events. 
- */ -function AG_createYAxisDictionary(nr_channels) { - let ticks, yaxis_dict, increment; - - if (AG_translationStep > 0) { - ticks = []; - const step = AG_computedStep * AG_translationStep; - for (let i = 0; i < nr_channels; i++) { - ticks.push([i * step, chanDisplayLabels[displayedChannels[i]]]); - } - yaxis_dict = { - min: -step, - max: (nr_channels + 1) * step, - ticks: ticks, - zoomRange: [0.1, 20] - }; - increment = nr_channels * step / numberOfPointsForVerticalLine; - if (increment === 0) throw "infinite loop"; - for (let k = -step; k < (nr_channels + 1) * step; k += increment) { - followingLine.push([0, k]); - } - } else { - ticks = [0, 'allChannels']; - yaxis_dict = { - min: -AG_computedStep / 2, - max: AG_computedStep / 2, - ticks: ticks, - zoomRange: [0.1, 20] - }; - increment = AG_computedStep / numberOfPointsForVerticalLine; - if (increment === 0) throw "infinite loop"; - for (let kk = -AG_computedStep / 2; kk < AG_computedStep / 2; kk += increment) { - followingLine.push([0, kk]); - } - } - AG_options.yaxis = yaxis_dict; - AG_homeViewYValues = [yaxis_dict.min, yaxis_dict.max]; - AG_defaultYaxis = yaxis_dict; -} - -function refreshChannels() { - submitSelectedChannels(false); - drawGraph(false, noOfShiftedPoints); -} - -function _AG_changeMode(tsIndex, val) { - tsModes[tsIndex] = parseInt(val); - refreshChannels(); -} - -function _AG_changeStateVariable(tsIndex, val) { - tsStates[tsIndex] = parseInt(val); - refreshChannels(); -} - -function _AG_getSelectedDataAndLongestChannelIndex(data) { - let offset = 0; - let selectedData = []; - let channelLengths = []; - - for (let i = 0; i < data.length; i++) { - const selectedChannels = getDisplayedChannels(data[i], offset); - offset += data[i].length; - if (selectedChannels.length > 0) { - channelLengths.push(selectedChannels[0].length); - } else { - channelLengths.push(-1); - } - selectedData = selectedData.concat(selectedChannels); - } - const longestChannelIndex = 
channelLengths.indexOf(Math.max.apply(Math, channelLengths)); - return {selectedData: selectedData, longestChannelIndex: longestChannelIndex} -} - -/* - * Get required data for the channels in AG_submitableSelectedChannels. If none - * exist then just use the previous 'displayedChannels' (or default in case of first run). - */ -function submitSelectedChannels(isEndOfData) { - - AG_currentIndex = AG_numberOfVisiblePoints; - if (AG_submitableSelectedChannels.length === 0) { - AG_submitableSelectedChannels = displayedChannels.slice(); - } - - if (!(isEndOfData && maxDataFileIndex === 0)) { - AG_allPoints = []; - displayedChannels = AG_submitableSelectedChannels.slice(0); - generateChannelColors(displayedChannels.length); - - let results = []; - for (let i = 0; i < nrOfPagesSet.length; i++) { - const dataURL = readDataPageURL(baseDataURLS[i], 0, dataPageSize, tsStates[i], tsModes[i]); - const data = HLPR_readJSONfromFile(dataURL); - results.push(parseData(data, i)); - } - const r = _AG_getSelectedDataAndLongestChannelIndex(results); - AG_allPoints = AG_allPoints.concat(r.selectedData); - longestChannelIndex = r.longestChannelIndex; - - // keep data only for the selected channels - AG_noOfLines = AG_allPoints.length; - } - - AG_displayedPoints = []; - AG_displayedTimes = []; - for (let ii = 0; ii < AG_noOfLines; ii++) { - AG_displayedPoints.push([]); - } - - if (!(isEndOfData && maxDataFileIndex === 0)) { - //read time - readTimeData(0, false); - AG_time = nextTimeData.slice(0); - } - // reset data - nextData = []; - nextTimeData = []; - AG_isLoadStarted = false; - isNextDataLoaded = false; - isNextTimeDataLoaded = false; - currentDataFileIndex = 0; - totalPassedData = 0; - currentLinePosition = 0; - if (nanValueFound) { - displayMessage('The given data contains some NaN values. 
All the NaN values were replaced by zero.', 'warningMessage'); - } - - // draw the first 'AG_numberOfVisiblePoints' points - redrawCurrentView(); - if (!isSmallPreview) { - AG_translationStep = $('#ctrl-input-spacing').slider("option", "value") / 4; - AG_scaling = $("#ctrl-input-scale").slider("value"); - } else { - AG_translationStep = 1; - } - - AG_createYAxisDictionary(AG_noOfLines); - redrawPlot([]); - resetToDefaultView(); - if (AG_isStopped) { - AG_isStopped = false; - drawGraph(false, noOfShiftedPoints); - AG_isStopped = true; - } else { - drawGraph(false, noOfShiftedPoints); - } -} - -/** - * This method decides if we are at the beginning or end of the graph, in which case we only need - * to move the vertical line, or in between, where vertical line is not moving, instead arrays are shifted. - */ -function shouldMoveLine(direction, shiftNo) { - shiftNo = shiftNo || 1; - let isEndOfGraph = false; - let isStartOfGraph = false; - if (direction === 1) { - isEndOfGraph = ((totalPassedData + AG_currentIndex + noOfShiftedPoints >= totalTimeLength) && (currentLinePosition < AG_numberOfVisiblePoints + shiftNo)); - isStartOfGraph = (currentLinePosition < targetVerticalLinePosition); - if (AG_displayedTimes[currentLinePosition] > AG_displayedPoints[longestChannelIndex][AG_displayedPoints[longestChannelIndex].length - 1][0]) { - isEndOfGraph = false; - } - } else { - isEndOfGraph = (currentLinePosition > targetVerticalLinePosition); - isStartOfGraph = ((totalPassedData + AG_currentIndex - noOfShiftedPoints < AG_numberOfVisiblePoints) && (currentLinePosition > 0)); - if (AG_displayedTimes[currentLinePosition] <= 0) { - isStartOfGraph = false; - } - } - - return isStartOfGraph || isEndOfGraph; -} - -var isEndOfData = false; -var AG_channelColorsDict = {}; -var AG_reversedChannelColorsDict = {}; - -/* - * Generate different colors for each channel. 
- */ -function generateChannelColors(nr_of_channels) { - AG_channelColorsDict = {}; - AG_reversedChannelColorsDict = {}; - let step = parseInt(255 / nr_of_channels); - for (let i = 0; i < nr_of_channels; i++) { - const color = "rgb(" + 250 * (i % 2) + "," + (200 - i * step) + "," + 220 * ((i + 1) % 2) + ")"; - AG_channelColorsDict[color] = i; - AG_reversedChannelColorsDict[i] = color; - } -} - -/* - * Get y-axis labels and update colors to correspond to each channel - */ -function setLabelColors() { - const labels = $('.flot-y-axis .tickLabel'); - for (let i = 0; i < labels.length; i++) { - const chan_idx = chanDisplayLabels.indexOf(labels[i].firstChild.textContent); - if (chan_idx >= 0) { - labels[i].style.color = AG_reversedChannelColorsDict[displayedChannels.indexOf(chan_idx)]; - labels[i].style.left = 80 + (i % 2) * 40 + 'px'; - } - } -} - -/* - * This method draw the actual plot. The 'executeShift' parameter decides if a shift is - * to be done, or just use the previous data points. 'shiftNo' decides the number of points - * that will be shifted. 
- */ -function drawGraph(executeShift, shiftNo) { - let i; - noOfShiftedPoints = shiftNo; - if (isEndOfData) { - isEndOfData = false; - submitSelectedChannels(true); - } - if (t !== null && t !== undefined) { - clearTimeout(t); - } - if (AG_isStopped) { - return; - } - if (shouldLoadNextDataFile()) { - loadNextDataFile(); - } - - let direction = 1; - if (_AG_get_speed(1) < 0) { - direction = -1; - } - - let moveLine = shouldMoveLine(direction, noOfShiftedPoints); - //Increment line position in case we need to move the line - if (moveLine && executeShift && !AG_isSpeedZero) { - currentLinePosition = currentLinePosition + noOfShiftedPoints * direction; - } - - if (currentLinePosition >= AG_numberOfVisiblePoints) { - isEndOfData = true; - } - - if (executeShift && !AG_isSpeedZero && !moveLine) { - let count = 0; - if (direction === -1) { - if (currentDataFileIndex > 0 || AG_currentIndex > AG_numberOfVisiblePoints) { - count = 0; - while (count < noOfShiftedPoints && AG_currentIndex - count > AG_numberOfVisiblePoints) { - count = count + 1; - AG_displayedTimes.unshift(AG_time[AG_currentIndex - AG_numberOfVisiblePoints - count]); - for (i = 0; i < AG_displayedPoints.length; i++) { - AG_displayedPoints[i].unshift( - [AG_time[AG_currentIndex - AG_numberOfVisiblePoints - count], - AG_addTranslationStep(AG_allPoints[i][AG_currentIndex - AG_numberOfVisiblePoints - count], i) - ]); - AG_displayedPoints[i].pop(); - } - AG_displayedTimes.pop(); - } - - if (AG_currentIndex - count > AG_numberOfVisiblePoints) { - AG_currentIndex = AG_currentIndex - count; - } else { - AG_currentIndex = Math.min(AG_currentIndex, AG_numberOfVisiblePoints); - if (currentDataFileIndex > 0 && isNextDataLoaded) { - changeCurrentDataFile(); - } - } - } - } else { - if (totalTimeLength > AG_currentIndex + totalPassedData) { - // here we add new 'noOfShiftedPoints' points to the chart and remove the first 'noOfShiftedPoints' visible points - count = 0; - while (count < noOfShiftedPoints && totalTimeLength 
> AG_currentIndex + count) { - AG_displayedTimes.push(AG_time[AG_currentIndex + count]); - for (i = 0; i < AG_displayedPoints.length; i++) { - AG_displayedPoints[i].push( - [AG_time[AG_currentIndex + count], - AG_addTranslationStep(AG_allPoints[i][AG_currentIndex + count], i) - ]); - AG_displayedPoints[i].shift(); - } - AG_displayedTimes.shift(); - count = count + 1; - } - - if (AG_currentIndex + count < AG_allPoints[longestChannelIndex].length) { - AG_currentIndex = AG_currentIndex + count; - } else { - AG_currentIndex = Math.max(AG_currentIndex, AG_allPoints[longestChannelIndex].length); - if (maxDataFileIndex > 0 && isNextDataLoaded) { - changeCurrentDataFile(); - } - } - } - } - } - if (!AG_isSpeedZero) { - for (i = 0; i < followingLine.length; i++) { - followingLine[i][0] = AG_displayedTimes[currentLinePosition]; - } - let preparedData = []; - for (let j = 0; j < AG_displayedPoints.length; j++) { - preparedData.push({data: AG_displayedPoints[j].slice(0), color: AG_reversedChannelColorsDict[j]}); - } - preparedData.push({data: followingLine, color: 'rgb(255, 0, 0)'}); - plot.setData(preparedData); - plot.setupGrid(); - plot.draw(); - setLabelColors(); - } - if (!isDoubleView) { - t = setTimeout("drawGraph(true, noOfShiftedPoints)", getTimeoutBasedOnSpeed()); - } -} - -/* - * Do a redraw of the plot. Be sure to keep the resizable margin elements as the plot method seems to destroy them. - */ -function redrawPlot(data) { - const target = $('#EEGcanvasDiv'); - const resizerChildren = target.children('.ui-resizable-handle'); - for (let i = 0; i < resizerChildren.length; i++) { - target[0].removeChild(resizerChildren[i]); - } - plot = $.plot(target, data, $.extend(true, {}, AG_options)); - for (let j = 0; j < resizerChildren.length; j++) { - target[0].appendChild(resizerChildren[j]); - } - setLabelColors(); -} - - -/** - * This hook will be called before Flot copies and normalizes the raw data for the given - * series. 
If the function fills in datapoints.points with normalized - * points and sets datapoints.pointsize to the size of the points, - * Flot will skip the copying/normalization step for this series. - */ -function processRawDataHook(plot, series, data, datapoints) { - datapoints.format = [ - {x: true, number: true, required: true}, - {y: true, number: true, required: true} - ]; - datapoints.pointsize = 2; - - for (let i = 0; i < data.length; i++) { - datapoints.points.push(data[i][0]); - datapoints.points.push(data[i][1]); - } - - series.xaxis.used = series.yaxis.used = true; -} - - -/** - * Translate the given value. - * We use this method to translate the values for the drawn line charts because we don't want them to overlap. - * - * @param value the value that should be translated. - * @param index the number of AG_translationSteps that should be used for translating the given value. - * @return {number} - */ -function AG_addTranslationStep(value, index) { - return value * AG_scaling - AG_normalizationSteps[displayedChannels[index]] + AG_translationStep * AG_computedStep * index; -} - -function getTimeoutBasedOnSpeed() { - const currentAnimationSpeedValue = _AG_get_speed(40); - if (currentAnimationSpeedValue === 0) { - return 300; - } - const timeout = 10 - Math.abs(currentAnimationSpeedValue); - if (timeout === 9) { - return 3000; - } - if (timeout === 8) { - return 2000; - } - if (timeout === 7) { - return 1000; - } - return timeout * 100 + 25; -} - -/* - * Load the data from a given step and center plot around that step. 
- */ -function loadEEGChartFromTimeStep(step) { - // Read all data for the page in which the selected step falls into - const chunkForStep = Math.floor(step / dataPageSize); - const dataUrl = readDataPageURL(baseDataURLS[0], chunkForStep * dataPageSize, (chunkForStep + 1) * dataPageSize, tsStates[0], tsModes[0]); - const dataPage = [parseData(HLPR_readJSONfromFile(dataUrl), 0)]; - AG_allPoints = getDisplayedChannels(dataPage[0], 0).slice(0); - AG_time = HLPR_readJSONfromFile(timeSetUrls[0][chunkForStep]).slice(0); - - totalPassedData = chunkForStep * dataPageSize; // New passed data will be all data until the start of this page - currentDataFileIndex = chunkForStep; - AG_displayedPoints = []; - const indexInPage = step % dataPageSize; // This is the index in the current page that step will have - let fromIdx, toIdx; - currentLinePosition = AG_numberOfVisiblePoints / 2; // Assume we are not end or beginning since that will be most of the times - if (indexInPage <= AG_numberOfVisiblePoints / 2) { - if (chunkForStep === 0) { - // We are at the beginning of the graph, line did not reach middle point yet, and we are still displaying the first - // AG_numberOfVisiblePoints values - AG_currentIndex = AG_numberOfVisiblePoints; - currentLinePosition = indexInPage; - prepareDisplayData(0, AG_numberOfVisiblePoints, AG_allPoints, AG_time); - } else { - // We are at an edge case between pages. So in order to have all the - // AG_numberOfVisiblePoints we need to also load the points from before this page - addFromPreviousPage(indexInPage, chunkForStep); - } - } else { - if ((indexInPage >= pageSize - AG_numberOfVisiblePoints / 2) || (nrOfPagesSet[0] === 1 && indexInPage + AG_numberOfVisiblePoints / 2 > AG_time.length)) { - if (chunkForStep >= nrOfPagesSet[0] - 1) { - // We are at the end of the graph. The line is starting to move further right from the middle position. 
We are just - // displaying the last AG_numberOfVisiblePoints from the last page - if (AG_time.length > AG_numberOfVisiblePoints) { - fromIdx = AG_time.length - 1 - AG_numberOfVisiblePoints; - } else { - fromIdx = 0; - } - toIdx = AG_time.length - 1; - AG_currentIndex = toIdx; - currentLinePosition = AG_numberOfVisiblePoints - (AG_time.length - 1 - indexInPage); - prepareDisplayData(fromIdx, toIdx, AG_allPoints, AG_time); - } else { - // We are at an edge case between pages. So in order to have all the - // AG_numberOfVisiblePoints we need to also load the points from after this page - addFromNextPage(indexInPage, chunkForStep); - } - } else { - // We are somewhere in the middle of the graph. - fromIdx = indexInPage - AG_numberOfVisiblePoints / 2; - toIdx = indexInPage + AG_numberOfVisiblePoints / 2; - AG_currentIndex = toIdx; - prepareDisplayData(fromIdx, toIdx, AG_allPoints, AG_time); - } - } - nextData = []; - AG_isLoadStarted = false; - isNextDataLoaded = false; - isNextTimeDataLoaded = false; -} - -/* - * Add all required data to AG_displayedPoints and AG_displayedTimes in order to center - * around indexInPage, if some of the required data is on the previous page. - */ -function addFromPreviousPage(indexInPage, currentPage) { - - const previousPageUrl = readDataPageURL(baseDataURLS[0], (currentPage - 1) * dataPageSize, currentPage * dataPageSize, tsStates[0], tsModes[0]); - let previousData = parseData(HLPR_readJSONfromFile(previousPageUrl), 0); - previousData = getDisplayedChannels(previousData, 0).slice(0); - const previousTimeData = HLPR_readJSONfromFile(timeSetUrls[0][currentPage - 1]); - // Compute which slices we would need from the 'full' two-pages data. 
- // We only need the difference so to center indexInPage at AG_numberOfVisiblePoints / 2 - let fromIdx, toIdx; - fromIdx = previousData[0].length - (AG_numberOfVisiblePoints / 2 - indexInPage); // This is from where we need to read from previous data - AG_currentIndex = toIdx = AG_numberOfVisiblePoints - (AG_numberOfVisiblePoints / 2 - indexInPage); // This is where we need to add from the current page - // Just generate displayed point and displayed times now - for (let idx = 0; idx < previousData.length; idx++) { - let idy; - let oneLine = []; - // Push data that is from previos slice - for (idy = fromIdx; idy < previousData[0].length; idy++) { - oneLine.push([previousTimeData[idy], AG_addTranslationStep(previousData[idx][idy], idx)]); - } - // Now that that is from our current slice - for (idy = 0; idy < toIdx; idy++) { - oneLine.push([AG_time[idy], AG_addTranslationStep(AG_allPoints[idx][idy], idx)]); - } - AG_displayedPoints.push(oneLine); - } - AG_displayedTimes = previousTimeData.slice(fromIdx).concat(AG_time.slice(0, toIdx)); - previousData = null; -} - -/* - * Add all required data to AG_displayedPoints and AG_displayedTimes in order to center - * around indexInPage, if some of the required data is on the next page. 
- */ -function addFromNextPage(indexInPage, currentPage) { - - const followingPageUrl = readDataPageURL(baseDataURLS[0], (currentPage + 1) * dataPageSize, (currentPage + 2) * dataPageSize, tsStates[0], tsModes[0]); - let followingData = parseData(HLPR_readJSONfromFile(followingPageUrl), 0); - followingData = getDisplayedChannels(followingData, 0).slice(0); - const followingTimeData = HLPR_readJSONfromFile(timeSetUrls[0][currentPage + 1]); - let fromIdx, toIdx; - fromIdx = indexInPage - (AG_numberOfVisiblePoints / 2); // We need to read starting from here from the current page - AG_currentIndex = toIdx = fromIdx + AG_numberOfVisiblePoints - AG_allPoints[0].length; // We need to read up to here from next page - for (let idx = 0; idx < AG_allPoints.length; idx++) { - let idy; - const oneLine = []; - // Push data that is from this slice - for (idy = fromIdx; idy < AG_allPoints[0].length; idy++) { - oneLine.push([AG_time[idy], AG_addTranslationStep(AG_allPoints[idx][idy], idx)]); - } - // Now that that is from next slice - for (idy = 0; idy < toIdx; idy++) { - oneLine.push([followingTimeData[idy], AG_addTranslationStep(followingData[idx][idy], idx)]); - } - AG_displayedPoints.push(oneLine); - } - AG_displayedTimes = AG_time.slice(fromIdx).concat(followingTimeData.slice(0, toIdx)); - // Since next page is already loaded, that becomes the current page - AG_allPoints = followingData; - AG_time = followingTimeData; - totalPassedData = (currentPage + 1) * dataPageSize; - currentDataFileIndex = currentPage + 1; - isNextDataLoaded = true; - isNextTimeDataLoaded = true; -} - -/* - * Just re-populate whole displayedPoints and displayedTimes given a start and end index. 
- */ -function prepareDisplayData(fromIdx, toIdx, pointsArray, timeArray) { - - for (let idx = 0; idx < pointsArray.length; idx++) { - let oneLine = []; - for (let idy = fromIdx; idy < toIdx; idy++) { - oneLine.push([timeArray[idy], AG_addTranslationStep(pointsArray[idx][idy], idx)]); - } - AG_displayedPoints.push(oneLine); - } - AG_displayedTimes = timeArray.slice(fromIdx, toIdx) -} - -/* - * Read the next data file asyncronously. Also get the corresponding time data file. - */ -function loadNextDataFile() { - AG_isLoadStarted = true; - const nx_idx = getNextDataFileIndex(); - cachedFileIndex = nx_idx; - AG_readFileDataAsynchronous(nrOfPagesSet, noOfChannelsPerSet, nx_idx, maxChannelLength, 0); - readTimeData(nx_idx, true); -} - -function changeCurrentDataFile() { - if (!isNextDataLoaded || !isNextTimeDataLoaded) { - return; - } - - if (cachedFileIndex !== getNextDataFileIndex()) { - AG_isLoadStarted = false; - isNextDataLoaded = false; - isNextTimeDataLoaded = false; - nextData = []; - nextTimeData = []; - return; - } - - const speed = _AG_get_speed(100); - const longestChannelLength = AG_allPoints[longestChannelIndex].length; - - if (speed > 0) { - totalPassedData = totalPassedData + longestChannelLength; - if (longestChannelLength < AG_currentIndex) { - AG_currentIndex = -(longestChannelLength - AG_currentIndex); - } else { - AG_currentIndex = 0; - } - } else if (speed < 0) { - totalPassedData = totalPassedData - longestChannelLength; - if (totalPassedData < 0) { - totalPassedData = 0; - } - } else { - return; - } - - AG_allPoints = nextData.slice(0); - nextData = []; - AG_time = nextTimeData.slice(0); - nextTimeData = []; - currentDataFileIndex = getNextDataFileIndex(); - AG_isLoadStarted = false; - isNextDataLoaded = false; - isNextTimeDataLoaded = false; - - if (speed < 0) { - AG_currentIndex = longestChannelLength + AG_currentIndex; - } -} - -function shouldLoadNextDataFile() { - if (!AG_isLoadStarted && maxDataFileIndex > 0) { - const nextFileIndex = 
getNextDataFileIndex(); - const speed = _AG_get_speed(1); // Assume left to right pass of data - if (currentDataFileIndex !== nextFileIndex) { - if ((speed > 0) && (maxChannelLength - AG_currentIndex < threshold * AG_numberOfVisiblePoints)) { - return true; - } - if ((speed < 0) && (AG_currentIndex - AG_numberOfVisiblePoints < threshold * AG_numberOfVisiblePoints)) { - return true; - } - } - } - return false; -} - -/* - * In case of multiple arrays find out which has the most data files that need - * to be loaded. - */ -function setMaxDataFileIndex(nrOfPagesPerArray) { - let max_ln = 0; - for (let i = 0; i < nrOfPagesPerArray.length; i++) { - if (nrOfPagesPerArray[i] > max_ln) { - max_ln = nrOfPagesPerArray[i]; - } - } - maxDataFileIndex = max_ln - 1; -} - -/* - * Return the index of the next data file that should be loaded. - */ -function getNextDataFileIndex() { - let nextIndex; - const speed = _AG_get_speed(100); - if (speed > 0) { - nextIndex = currentDataFileIndex + 1; - if (nextIndex >= maxDataFileIndex) { - return maxDataFileIndex; - } - } else { - nextIndex = currentDataFileIndex - 1; - if (nextIndex <= 0) { - return 0; - } - } - return nextIndex; -} - -function AG_readFileDataAsynchronous(nrOfPages, noOfChannelsPerSet, currentFileIndex, maxChannelLength, dataSetIndex) { - if (dataSetIndex >= nrOfPages.length) { - isNextDataLoaded = true; - // keep data only for the selected channels - const r = _AG_getSelectedDataAndLongestChannelIndex(nextData); - longestChannelIndex = r.longestChannelIndex; - nextData = r.selectedData; //todo: occasional shape mismatch 3d <- 2d - return; - } - if (nrOfPages[dataSetIndex] - 1 < currentFileIndex && AG_isLoadStarted) { - // todo: assumed that this is computing a padding for smaller signals. 
check if this is really the purpose of this - let j; - let padding = []; - let oneChannel = []; - for (j = 0; j < maxChannelLength; j++) { - oneChannel.push(0); - } - for (j = 0; j < noOfChannelsPerSet[dataSetIndex]; j++) { - padding.push(oneChannel); - } - nextData.push(padding); - - AG_readFileDataAsynchronous(nrOfPages, noOfChannelsPerSet, currentFileIndex, maxChannelLength, dataSetIndex + 1); - } else { - doAjaxCall({ - url: readDataPageURL(baseDataURLS[dataSetIndex], currentFileIndex * dataPageSize, (currentFileIndex + 1) * dataPageSize, tsStates[dataSetIndex], tsModes[dataSetIndex]), - success: function (data) { - if (AG_isLoadStarted) { - data = $.parseJSON(data); - const result = parseData(data, dataSetIndex); - nextData.push(result); - - AG_readFileDataAsynchronous(nrOfPages, noOfChannelsPerSet, currentFileIndex, maxChannelLength, dataSetIndex + 1); - } - } - }); - } -} - -/* - * Data is received from the HLPR_parseJSON as a 500/74 array. We need to transform it - * into an 74/500 one and in the transformation also replace all NaN values. 
- */ -function parseData(dataArray, dataSetIndex) { - - let result = []; - for (let i = 0; i < noOfChannelsPerSet[dataSetIndex]; i++) { - result.push([]); - } - for (let j = 0; j < dataArray.length; j++) { - for (let k = 0; k < noOfChannelsPerSet[dataSetIndex]; k++) { - let arrElem = dataArray[j][k]; - if (arrElem === 'NaN') { - nanValueFound = true; - arrElem = 0; - } - result[k].push(arrElem); - } - } - return result; -} - -/** - * - * @param fileIndex - * @param asyncRead true only if the file should be read asynchronous - */ -function readTimeData(fileIndex, asyncRead) { - if (timeSetUrls[longestChannelIndex].length <= fileIndex) { - nextTimeData = []; - for (let i = 0; i < maxChannelLength; i++) { - nextTimeData.push(totalPassedData + i); - } - isNextTimeDataLoaded = true; - } else { - if (asyncRead) { - doAjaxCall({ - url: timeSetUrls[longestChannelIndex][fileIndex], - success: function (data) { - nextTimeData = $.parseJSON(data); - isNextTimeDataLoaded = true; - } - }); - } else { - nextTimeData = HLPR_readJSONfromFile(timeSetUrls[longestChannelIndex][fileIndex]); - isNextTimeDataLoaded = true; - } - } -} - -function getArrayFromDataFile(dataFile) { - let fileData = dataFile.replace(/\n/g, " ").replace(/\t/g, " "); - let arrayData = $.trim(fileData).split(" "); - for (let i = 0; i < arrayData.length; i++) { - arrayData[i] = parseFloat(arrayData[i]); - } - return arrayData; -} - -function getDisplayedChannels(listOfAllChannels, offset) { - let selectedData = []; - for (let i = 0; i < displayedChannels.length; i++) { - if (listOfAllChannels[displayedChannels[i] - offset] !== undefined) { - selectedData.push(listOfAllChannels[displayedChannels[i] - offset].slice(0)); - } - } - return selectedData; -} From c456bb6d035d7d67c3d2076a4bb3033dbde61b41 Mon Sep 17 00:00:00 2001 From: kimonoki Date: Wed, 1 Aug 2018 15:06:14 +1000 Subject: [PATCH 31/53] TVB-2379 Move animated_graph.js to new_dual_brain --- .../new_dual_brain/scripts/animated_graph.js | 1129 
+++++++++++++++++ 1 file changed, 1129 insertions(+) create mode 100644 tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/animated_graph.js diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/animated_graph.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/animated_graph.js new file mode 100644 index 000000000..50cf15b12 --- /dev/null +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/animated_graph.js @@ -0,0 +1,1129 @@ +/** + * TheVirtualBrain-Framework Package. This package holds all Data Management, and + * Web-UI helpful to run brain-simulations. To use it, you also need do download + * TheVirtualBrain-Scientific Package (for simulators). See content of the + * documentation-folder for more details. See also http://www.thevirtualbrain.org + * + * (c) 2012-2017, Baycrest Centre for Geriatric Care ("Baycrest") and others + * + * This program is free software: you can redistribute it and/or modify it under the + * terms of the GNU General Public License as published by the Free Software Foundation, + * either version 3 of the License, or (at your option) any later version. + * This program is distributed in the hope that it will be useful, but WITHOUT ANY + * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A + * PARTICULAR PURPOSE. See the GNU General Public License for more details. + * You should have received a copy of the GNU General Public License along with this + * program. If not, see . 
+ * + **/ + +/* globals doAjaxCall, readDataPageURL, HLPR_readJSONfromFile */ + +// //it contains all the points that have to be/have been displayed (it contains all the points from the read file); +// //it is an array of arrays (each array contains the points for a certain line chart) +var AG_allPoints = []; +// it supplies the labels for x axis (time in milliseconds) +var AG_time = []; +//it is used for clearing timing events (the event that calls the drawGraph method after a specified time-interval) +var t = null; +//how many elements will be visible on the screen +//computed on the server +var AG_numberOfVisiblePoints = 0; +//all the points that are visible on the screen at a certain moment; the points are read from the AG_allPoints array +//and are translated with a value equal to [AG_translationStep * (AG_noOfLines - the index of the current line)] +//THE FORM of this matrix is: [ [[t1, a1], [t2, a2], ...], [[t1, b1], [t2, b2], ...], ..., [[t1, n1], [t2, n2], ...]] +// t1, t2, ... - represents time that is visible on the screen at a certain moment; +// a1, a2,... - represents the translated values +var AG_displayedPoints = []; +//All the times values that are displayed at a certain moment. To be used by the vertical time line. +var AG_displayedTimes = []; +//the last element that was displayed on the screen is located at this index; the index refers to AG_allPoints array +var AG_currentIndex = 0; +//this var should be set to the length of the AG_allPoints array +var AG_noOfLines = 0; +// the step used for translating the drawn line charts; we translate the drawn line charts because we don't want them to overlap +// the lines will be translated with AG_translationStep * AG_computedStep +var AG_translationStep = 1; +// a scaling factor for the displayed signal +var AG_scaling = 1; +// this var is computed on the server. It is used for line translation (AG_translationStep * AG_computedStep). 
+var AG_computedStep = 50; +//The normalization steps for each of the channels, in order to bring them centered near the channel bar +var AG_normalizationSteps = []; +//If the animation is paused using pause/start button +var AG_isStopped = false; +//If animation speed is set at a 0 value +var AG_isSpeedZero = false; +//the number of points that are shifted/unshift at a moment +var noOfShiftedPoints = 1; +// List of channels that will be submited on a change of the displayed channels +var AG_submitableSelectedChannels = []; +// contains the indexes of the channels that are displayed +var displayedChannels = []; +// a list of urls pointing to the files from where we should read the time +var timeSetUrls = []; +//a list containing the number of channel in each file specified in 'dataSetUrls' fields +var noOfChannelsPerSet = []; +// the number of points from the longest channel +var maxChannelLength = 0; +// the maximum number of data files from all the submited datatypes +var maxDataFileIndex = 0; +// represents the file index from the dataset that is displayed in the chart +var currentDataFileIndex = 0; +// contains the parsed data for the next file from the dataset +var nextData = []; +// contains the time for the next file from the dataset +var nextTimeData = []; +// true only if the next file from dataset was loaded into memory +var isNextDataLoaded = false; +// true only if the next time data was loaded into memory +var isNextTimeDataLoaded = false; +// true only if the the process of loading a file is started +var AG_isLoadStarted = false; +// this is the number of steps left before updating the next file +var threshold = 10; +// the amount of data that has passed +var totalPassedData = 0; +// the number of channels +var totalNumberOfChannels = 0; +// true only if any of the displayed channels contains NaN values +var nanValueFound = false; +//Channel prefix for each array of data +var channelPrefix = "Channel: "; +// +var totalTimeLength = 0; +//Default values 
for the x and y axis of the plot +//NOTE: do not remove from the axis AG_options 'labelWidth' and 'labelHeight' because +//this will slow down the animation +var lbl_x_width = 100; +var lbl_x_height = 30; +var zoom_range = [0.1, 20]; + +var AG_defaultXaxis = {zoomRange: zoom_range, labelWidth: lbl_x_width, labelHeight: lbl_x_height}; +var AG_defaultYaxis = {show: false, zoomRange: zoom_range, labelWidth: 200, labelHeight: 30}; + +// the index of the cached file (the file that was loaded asynchronous) +var cachedFileIndex = 0; +var labelX = ""; +var chartTitle = ""; +//The displayed labels for the graph +var chanDisplayLabels = []; +// setup plot +var AG_options = { + series: { + shadowSize: 0, + color: 'blue' + }, // drawing is faster without shadows + lines: { + lineWidth: 1, + show: true + }, + yaxis: AG_defaultYaxis, + xaxis: AG_defaultXaxis, + grid: { + backgroundColor: 'white', + hoverable: true, + clickable: true + }, + points: { + show: false, + radius: 0.001 + }, + zoom: { + interactive: false + }, + selection: { + mode: "xy" + }, + legend: { + show: false + }, + hooks: { + processRawData: [processRawDataHook] + } +}; + +var DEFAULT_MAX_CHANNELS = 10; +var plot = null; + +var followingLine = []; +//The required position from which the following vertical time line will start moving with the array +//Expressed as a number from [0, 1], 0 - start from begining, 1 start only at end +var procentualLinePosition = 0.5; +//The actual position in the graph of the following vertical line. Start from -speed to account for the initial translation. +var currentLinePosition = 0; +//The number of points used to display the vertical line. 
+var numberOfPointsForVerticalLine = 1000; +var isDoubleView = false; + +var AG_homeViewYValues = []; +var AG_homeViewXValues = {zoomRange: zoom_range, labelWidth: lbl_x_width, labelHeight: lbl_x_height}; +//This will be set to true in the launch_viewer method called by burst small previews +var isSmallPreview = false; + +var targetVerticalLinePosition; + +// The base url for calling any methods on a given datatype +var baseDataURLS = []; +var nrOfPagesSet = []; +var dataPageSize = []; +var tsModes = [0, 0, 0]; +var tsStates = [0, 0, 0]; +var longestChannelIndex = 0; + +// region selection component +var AG_regionSelector = null; +// State mode selector. Used as a global only in dual view +var AG_modeSelector = null; + +function resizeToFillParent() { + const canvas = $('#EEGcanvasDiv'); + let container, width, height; + + if (!isSmallPreview) { + // Just use parent section width and height. For width remove some space for the labels to avoid scrolls + // For height we have the toolbar there. Using 100% does not seem to work properly with FLOT. 
+ container = canvas.parent(); + width = container.width() - 40; + height = container.height() - 80; + } else { + container = $('body'); + width = container.width() - 40; + height = container.height() - 20; + } + canvas.width(width).height(height); +} + +window.onresize = function () { + resizeToFillParent(); + redrawPlot(plot.getData()); +}; + +/** + * Animated graph entry point + */ +function AG_startAnimatedChart(ag_settings) { + isSmallPreview = false; + _AG_initGlobals(ag_settings); + _AG_initPaginationState(ag_settings.number_of_visible_points); + _AG_preStart(); + drawSliderForScale(); + drawSliderForAnimationSpeed(); + _AG_init_selection(ag_settings.measurePointsSelectionGIDs); + + bindHoverEvent(); + initializeCanvasEvents(); + if (!ag_settings.extended_view) { + bindZoomEvent(); + } +} + +function AG_startAnimatedChartPreview(ag_settings) { + isSmallPreview = true; + AG_isStopped = true; + _AG_initGlobals(ag_settings); + _AG_initPaginationState(ag_settings.number_of_visible_points); + _AG_preStart(); + + // Initialize AG_submitableSelectedChannels + // warning: Assumes channel values are a range + if (AG_submitableSelectedChannels.length === 0) { + // Viewer breaks if this is empty. Fill the first few channels + const defaultSelectionLength = Math.min(totalNumberOfChannels, DEFAULT_MAX_CHANNELS); + for (let i = 0; i < defaultSelectionLength; i++) { + AG_submitableSelectedChannels.push(i); + } + } + + refreshChannels(); +} + +function AG_rePaginate(number_of_visible_points) { + _AG_initPaginationState(number_of_visible_points); + $('#display-page-size').html('' + number_of_visible_points); + refreshChannels(); + if (isDoubleView) { + initActivityData(); + } +} + +/** + * Initialize global state. Part of the AG startup. 
+ * @private + */ +function _AG_initGlobals(ag_settings) { + isDoubleView = ag_settings.extended_view; + // dataSetUrls = $.parseJSON(dataSetPaths); + baseDataURLS = ag_settings.baseURLS; + nrOfPagesSet = ag_settings.nrOfPages; + dataPageSize = ag_settings.pageSize; + chanDisplayLabels = ag_settings.channelLabels; + noOfChannelsPerSet = ag_settings.channelsPerSet; + timeSetUrls = ag_settings.timeSetPaths; + maxChannelLength = parseInt(ag_settings.pageSize); + AG_normalizationSteps = ag_settings.normalizedSteps; + setMaxDataFileIndex(nrOfPagesSet); + totalNumberOfChannels = ag_settings.noOfChannels; + totalTimeLength = ag_settings.totalLength; + nanValueFound = ag_settings.nan_value_found; + AG_computedStep = ag_settings.translationStep; +} + +/** + * Initialize pagination. Part of AG startup. + * @private + */ +function _AG_initPaginationState(number_of_visible_points) { + AG_numberOfVisiblePoints = parseInt(number_of_visible_points); + if (AG_numberOfVisiblePoints > maxChannelLength) { + AG_numberOfVisiblePoints = maxChannelLength; + } + targetVerticalLinePosition = AG_numberOfVisiblePoints * procentualLinePosition; +} + +/** + * Misc common startup logic. Part of AG startup + * @private + */ +function _AG_preStart() { + resizeToFillParent(); +} + +/** + * Creates a selection component for each time series displayed by this eeg view + * Part of AG startup + * The order of the filterGids determines the order of the selectors + * It must have the same ordering as all other timeseries arrays + * @private + */ +function _AG_init_selection(filterGids) { + let i; + let selectors = []; + + /** + * Returns the selected channel indices as interpreted by AG_submitableSelectedChannels + * ( starting at 0 and ending at len(timeseries_0_channels) + ... 
+ len(timeseries_final_channels) ) + */ + function getSelectedChannelsAsGlobalIndices() { + let all_selected = []; + let offset = 0; + + for (let i = 0; i < selectors.length; i++) { + const selector = selectors[i]; + const selected_in_current = selector.val(); + + for (let j = 0; j < selected_in_current.length; j++) { + all_selected.push(offset + parseInt(selected_in_current[j], 10)); + } + offset += selector._allValues.length; + } + return all_selected; + } + + // init selectors + let selectorId, selector; + + for (i = 0; i < filterGids.length; i++) { + selectorId = "#channelSelector" + i; + selector = TVBUI.regionSelector(selectorId, {filterGid: filterGids[i]}); + selector.change(function (current_selection) { + AG_submitableSelectedChannels = getSelectedChannelsAsGlobalIndices(); + refreshChannels(); + }); + selectors.push(selector); + } + // the first selector is special. we select by default some channels in it and in case of a dual view + // his selection is synchronized with the brain + AG_regionSelector = selectors[0]; + + // Initialize AG_submitableSelectedChannels + AG_submitableSelectedChannels = getSelectedChannelsAsGlobalIndices(); + + if (AG_submitableSelectedChannels.length === 0) { + // Viewer breaks if this is empty. Fill the first few channels + const defaultSelectionLength = Math.min(totalNumberOfChannels, DEFAULT_MAX_CHANNELS); + // we take the values form the dom, a range(defaultSelectionLength) is not a valid selection if there are multiple time series + AG_submitableSelectedChannels = AG_regionSelector._allValues.slice(0, defaultSelectionLength); + AG_regionSelector.val(AG_submitableSelectedChannels); + } + + // Init the mode selection components. 
Assumes that there are part of the selector dom + let modeSelectors = []; + for (i = 0; i < filterGids.length; i++) { + selectorId = "#channelSelector" + i; + selector = TVBUI.modeAndStateSelector(selectorId, i); + selector.modeChanged(_AG_changeMode); + selector.stateVariableChanged(_AG_changeStateVariable); + modeSelectors.push(selector); + } + // The dual view needs to subscribe to this selector; so we save it like AG_regionSelector + AG_modeSelector = modeSelectors[0]; + + refreshChannels(); +} + +/** + * Read speed from the dom + * @param defaultSpeed default speed when there is no speed slider + * @private + */ +function _AG_get_speed(defaultSpeed) { + let speed = defaultSpeed; + if (!isSmallPreview && !isDoubleView) { + speed = $("#ctrl-input-speed").slider("value"); + } + return speed; +} + +/* + * Create FLOT specific options dictionary for the y axis, with correct labels and positioning for + * all channels. Then store these values in 'AG_homeViewYValues' so they can be used in case of a + * 'Home' action in a series of zoom events. 
+ */ +function AG_createYAxisDictionary(nr_channels) { + let ticks, yaxis_dict, increment; + + if (AG_translationStep > 0) { + ticks = []; + const step = AG_computedStep * AG_translationStep; + for (let i = 0; i < nr_channels; i++) { + ticks.push([i * step, chanDisplayLabels[displayedChannels[i]]]); + } + yaxis_dict = { + min: -step, + max: (nr_channels + 1) * step, + ticks: ticks, + zoomRange: [0.1, 20] + }; + increment = nr_channels * step / numberOfPointsForVerticalLine; + if (increment === 0) throw "infinite loop"; + for (let k = -step; k < (nr_channels + 1) * step; k += increment) { + followingLine.push([0, k]); + } + } else { + ticks = [0, 'allChannels']; + yaxis_dict = { + min: -AG_computedStep / 2, + max: AG_computedStep / 2, + ticks: ticks, + zoomRange: [0.1, 20] + }; + increment = AG_computedStep / numberOfPointsForVerticalLine; + if (increment === 0) throw "infinite loop"; + for (let kk = -AG_computedStep / 2; kk < AG_computedStep / 2; kk += increment) { + followingLine.push([0, kk]); + } + } + AG_options.yaxis = yaxis_dict; + AG_homeViewYValues = [yaxis_dict.min, yaxis_dict.max]; + AG_defaultYaxis = yaxis_dict; +} + +function refreshChannels() { + submitSelectedChannels(false); + drawGraph(false, noOfShiftedPoints); +} + +function _AG_changeMode(tsIndex, val) { + tsModes[tsIndex] = parseInt(val); + refreshChannels(); +} + +function _AG_changeStateVariable(tsIndex, val) { + tsStates[tsIndex] = parseInt(val); + refreshChannels(); +} + +function _AG_getSelectedDataAndLongestChannelIndex(data) { + let offset = 0; + let selectedData = []; + let channelLengths = []; + + for (let i = 0; i < data.length; i++) { + const selectedChannels = getDisplayedChannels(data[i], offset); + offset += data[i].length; + if (selectedChannels.length > 0) { + channelLengths.push(selectedChannels[0].length); + } else { + channelLengths.push(-1); + } + selectedData = selectedData.concat(selectedChannels); + } + const longestChannelIndex = 
channelLengths.indexOf(Math.max.apply(Math, channelLengths)); + return {selectedData: selectedData, longestChannelIndex: longestChannelIndex} +} + +/* + * Get required data for the channels in AG_submitableSelectedChannels. If none + * exist then just use the previous 'displayedChannels' (or default in case of first run). + */ +function submitSelectedChannels(isEndOfData) { + + AG_currentIndex = AG_numberOfVisiblePoints; + if (AG_submitableSelectedChannels.length === 0) { + AG_submitableSelectedChannels = displayedChannels.slice(); + } + + if (!(isEndOfData && maxDataFileIndex === 0)) { + AG_allPoints = []; + displayedChannels = AG_submitableSelectedChannels.slice(0); + generateChannelColors(displayedChannels.length); + + let results = []; + for (let i = 0; i < nrOfPagesSet.length; i++) { + const dataURL = readDataPageURL(baseDataURLS[i], 0, dataPageSize, tsStates[i], tsModes[i]); + const data = HLPR_readJSONfromFile(dataURL); + results.push(parseData(data, i)); + } + const r = _AG_getSelectedDataAndLongestChannelIndex(results); + AG_allPoints = AG_allPoints.concat(r.selectedData); + longestChannelIndex = r.longestChannelIndex; + + // keep data only for the selected channels + AG_noOfLines = AG_allPoints.length; + } + + AG_displayedPoints = []; + AG_displayedTimes = []; + for (let ii = 0; ii < AG_noOfLines; ii++) { + AG_displayedPoints.push([]); + } + + if (!(isEndOfData && maxDataFileIndex === 0)) { + //read time + readTimeData(0, false); + AG_time = nextTimeData.slice(0); + } + // reset data + nextData = []; + nextTimeData = []; + AG_isLoadStarted = false; + isNextDataLoaded = false; + isNextTimeDataLoaded = false; + currentDataFileIndex = 0; + totalPassedData = 0; + currentLinePosition = 0; + if (nanValueFound) { + displayMessage('The given data contains some NaN values. 
All the NaN values were replaced by zero.', 'warningMessage'); + } + + // draw the first 'AG_numberOfVisiblePoints' points + redrawCurrentView(); + if (!isSmallPreview) { + AG_translationStep = $('#ctrl-input-spacing').slider("option", "value") / 4; + AG_scaling = $("#ctrl-input-scale").slider("value"); + } else { + AG_translationStep = 1; + } + + AG_createYAxisDictionary(AG_noOfLines); + redrawPlot([]); + resetToDefaultView(); + if (AG_isStopped) { + AG_isStopped = false; + drawGraph(false, noOfShiftedPoints); + AG_isStopped = true; + } else { + drawGraph(false, noOfShiftedPoints); + } +} + +/** + * This method decides if we are at the beginning or end of the graph, in which case we only need + * to move the vertical line, or in between, where vertical line is not moving, instead arrays are shifted. + */ +function shouldMoveLine(direction, shiftNo) { + shiftNo = shiftNo || 1; + let isEndOfGraph = false; + let isStartOfGraph = false; + if (direction === 1) { + isEndOfGraph = ((totalPassedData + AG_currentIndex + noOfShiftedPoints >= totalTimeLength) && (currentLinePosition < AG_numberOfVisiblePoints + shiftNo)); + isStartOfGraph = (currentLinePosition < targetVerticalLinePosition); + if (AG_displayedTimes[currentLinePosition] > AG_displayedPoints[longestChannelIndex][AG_displayedPoints[longestChannelIndex].length - 1][0]) { + isEndOfGraph = false; + } + } else { + isEndOfGraph = (currentLinePosition > targetVerticalLinePosition); + isStartOfGraph = ((totalPassedData + AG_currentIndex - noOfShiftedPoints < AG_numberOfVisiblePoints) && (currentLinePosition > 0)); + if (AG_displayedTimes[currentLinePosition] <= 0) { + isStartOfGraph = false; + } + } + + return isStartOfGraph || isEndOfGraph; +} + +var isEndOfData = false; +var AG_channelColorsDict = {}; +var AG_reversedChannelColorsDict = {}; + +/* + * Generate different colors for each channel. 
+ */ +function generateChannelColors(nr_of_channels) { + AG_channelColorsDict = {}; + AG_reversedChannelColorsDict = {}; + let step = parseInt(255 / nr_of_channels); + for (let i = 0; i < nr_of_channels; i++) { + const color = "rgb(" + 250 * (i % 2) + "," + (200 - i * step) + "," + 220 * ((i + 1) % 2) + ")"; + AG_channelColorsDict[color] = i; + AG_reversedChannelColorsDict[i] = color; + } +} + +/* + * Get y-axis labels and update colors to correspond to each channel + */ +function setLabelColors() { + const labels = $('.flot-y-axis .tickLabel'); + for (let i = 0; i < labels.length; i++) { + const chan_idx = chanDisplayLabels.indexOf(labels[i].firstChild.textContent); + if (chan_idx >= 0) { + labels[i].style.color = AG_reversedChannelColorsDict[displayedChannels.indexOf(chan_idx)]; + labels[i].style.left = 80 + (i % 2) * 40 + 'px'; + } + } +} + +/* + * This method draw the actual plot. The 'executeShift' parameter decides if a shift is + * to be done, or just use the previous data points. 'shiftNo' decides the number of points + * that will be shifted. 
+ */ +function drawGraph(executeShift, shiftNo) { + let i; + noOfShiftedPoints = shiftNo; + if (isEndOfData) { + isEndOfData = false; + submitSelectedChannels(true); + } + if (t !== null && t !== undefined) { + clearTimeout(t); + } + if (AG_isStopped) { + return; + } + if (shouldLoadNextDataFile()) { + loadNextDataFile(); + } + + let direction = 1; + if (_AG_get_speed(1) < 0) { + direction = -1; + } + + let moveLine = shouldMoveLine(direction, noOfShiftedPoints); + //Increment line position in case we need to move the line + if (moveLine && executeShift && !AG_isSpeedZero) { + currentLinePosition = currentLinePosition + noOfShiftedPoints * direction; + } + + if (currentLinePosition >= AG_numberOfVisiblePoints) { + isEndOfData = true; + } + + if (executeShift && !AG_isSpeedZero && !moveLine) { + let count = 0; + if (direction === -1) { + if (currentDataFileIndex > 0 || AG_currentIndex > AG_numberOfVisiblePoints) { + count = 0; + while (count < noOfShiftedPoints && AG_currentIndex - count > AG_numberOfVisiblePoints) { + count = count + 1; + AG_displayedTimes.unshift(AG_time[AG_currentIndex - AG_numberOfVisiblePoints - count]); + for (i = 0; i < AG_displayedPoints.length; i++) { + AG_displayedPoints[i].unshift( + [AG_time[AG_currentIndex - AG_numberOfVisiblePoints - count], + AG_addTranslationStep(AG_allPoints[i][AG_currentIndex - AG_numberOfVisiblePoints - count], i) + ]); + AG_displayedPoints[i].pop(); + } + AG_displayedTimes.pop(); + } + + if (AG_currentIndex - count > AG_numberOfVisiblePoints) { + AG_currentIndex = AG_currentIndex - count; + } else { + AG_currentIndex = Math.min(AG_currentIndex, AG_numberOfVisiblePoints); + if (currentDataFileIndex > 0 && isNextDataLoaded) { + changeCurrentDataFile(); + } + } + } + } else { + if (totalTimeLength > AG_currentIndex + totalPassedData) { + // here we add new 'noOfShiftedPoints' points to the chart and remove the first 'noOfShiftedPoints' visible points + count = 0; + while (count < noOfShiftedPoints && totalTimeLength 
> AG_currentIndex + count) { + AG_displayedTimes.push(AG_time[AG_currentIndex + count]); + for (i = 0; i < AG_displayedPoints.length; i++) { + AG_displayedPoints[i].push( + [AG_time[AG_currentIndex + count], + AG_addTranslationStep(AG_allPoints[i][AG_currentIndex + count], i) + ]); + AG_displayedPoints[i].shift(); + } + AG_displayedTimes.shift(); + count = count + 1; + } + + if (AG_currentIndex + count < AG_allPoints[longestChannelIndex].length) { + AG_currentIndex = AG_currentIndex + count; + } else { + AG_currentIndex = Math.max(AG_currentIndex, AG_allPoints[longestChannelIndex].length); + if (maxDataFileIndex > 0 && isNextDataLoaded) { + changeCurrentDataFile(); + } + } + } + } + } + if (!AG_isSpeedZero) { + for (i = 0; i < followingLine.length; i++) { + followingLine[i][0] = AG_displayedTimes[currentLinePosition]; + } + let preparedData = []; + for (let j = 0; j < AG_displayedPoints.length; j++) { + preparedData.push({data: AG_displayedPoints[j].slice(0), color: AG_reversedChannelColorsDict[j]}); + } + preparedData.push({data: followingLine, color: 'rgb(255, 0, 0)'}); + plot.setData(preparedData); + plot.setupGrid(); + plot.draw(); + setLabelColors(); + } + if (!isDoubleView) { + t = setTimeout("drawGraph(true, noOfShiftedPoints)", getTimeoutBasedOnSpeed()); + } +} + +/* + * Do a redraw of the plot. Be sure to keep the resizable margin elements as the plot method seems to destroy them. + */ +function redrawPlot(data) { + const target = $('#EEGcanvasDiv'); + const resizerChildren = target.children('.ui-resizable-handle'); + for (let i = 0; i < resizerChildren.length; i++) { + target[0].removeChild(resizerChildren[i]); + } + plot = $.plot(target, data, $.extend(true, {}, AG_options)); + for (let j = 0; j < resizerChildren.length; j++) { + target[0].appendChild(resizerChildren[j]); + } + setLabelColors(); +} + + +/** + * This hook will be called before Flot copies and normalizes the raw data for the given + * series. 
If the function fills in datapoints.points with normalized + * points and sets datapoints.pointsize to the size of the points, + * Flot will skip the copying/normalization step for this series. + */ +function processRawDataHook(plot, series, data, datapoints) { + datapoints.format = [ + {x: true, number: true, required: true}, + {y: true, number: true, required: true} + ]; + datapoints.pointsize = 2; + + for (let i = 0; i < data.length; i++) { + datapoints.points.push(data[i][0]); + datapoints.points.push(data[i][1]); + } + + series.xaxis.used = series.yaxis.used = true; +} + + +/** + * Translate the given value. + * We use this method to translate the values for the drawn line charts because we don't want them to overlap. + * + * @param value the value that should be translated. + * @param index the number of AG_translationSteps that should be used for translating the given value. + * @return {number} + */ +function AG_addTranslationStep(value, index) { + return value * AG_scaling - AG_normalizationSteps[displayedChannels[index]] + AG_translationStep * AG_computedStep * index; +} + +function getTimeoutBasedOnSpeed() { + const currentAnimationSpeedValue = _AG_get_speed(40); + if (currentAnimationSpeedValue === 0) { + return 300; + } + const timeout = 10 - Math.abs(currentAnimationSpeedValue); + if (timeout === 9) { + return 3000; + } + if (timeout === 8) { + return 2000; + } + if (timeout === 7) { + return 1000; + } + return timeout * 100 + 25; +} + +/* + * Load the data from a given step and center plot around that step. 
+ */ +function loadEEGChartFromTimeStep(step) { + // Read all data for the page in which the selected step falls into + const chunkForStep = Math.floor(step / dataPageSize); + const dataUrl = readDataPageURL(baseDataURLS[0], chunkForStep * dataPageSize, (chunkForStep + 1) * dataPageSize, tsStates[0], tsModes[0]); + const dataPage = [parseData(HLPR_readJSONfromFile(dataUrl), 0)]; + AG_allPoints = getDisplayedChannels(dataPage[0], 0).slice(0); + AG_time = HLPR_readJSONfromFile(timeSetUrls[0][chunkForStep]).slice(0); + + totalPassedData = chunkForStep * dataPageSize; // New passed data will be all data until the start of this page + currentDataFileIndex = chunkForStep; + AG_displayedPoints = []; + const indexInPage = step % dataPageSize; // This is the index in the current page that step will have + let fromIdx, toIdx; + currentLinePosition = AG_numberOfVisiblePoints / 2; // Assume we are not end or beginning since that will be most of the times + if (indexInPage <= AG_numberOfVisiblePoints / 2) { + if (chunkForStep === 0) { + // We are at the beginning of the graph, line did not reach middle point yet, and we are still displaying the first + // AG_numberOfVisiblePoints values + AG_currentIndex = AG_numberOfVisiblePoints; + currentLinePosition = indexInPage; + prepareDisplayData(0, AG_numberOfVisiblePoints, AG_allPoints, AG_time); + } else { + // We are at an edge case between pages. So in order to have all the + // AG_numberOfVisiblePoints we need to also load the points from before this page + addFromPreviousPage(indexInPage, chunkForStep); + } + } else { + if ((indexInPage >= pageSize - AG_numberOfVisiblePoints / 2) || (nrOfPagesSet[0] === 1 && indexInPage + AG_numberOfVisiblePoints / 2 > AG_time.length)) { + if (chunkForStep >= nrOfPagesSet[0] - 1) { + // We are at the end of the graph. The line is starting to move further right from the middle position. 
We are just + // displaying the last AG_numberOfVisiblePoints from the last page + if (AG_time.length > AG_numberOfVisiblePoints) { + fromIdx = AG_time.length - 1 - AG_numberOfVisiblePoints; + } else { + fromIdx = 0; + } + toIdx = AG_time.length - 1; + AG_currentIndex = toIdx; + currentLinePosition = AG_numberOfVisiblePoints - (AG_time.length - 1 - indexInPage); + prepareDisplayData(fromIdx, toIdx, AG_allPoints, AG_time); + } else { + // We are at an edge case between pages. So in order to have all the + // AG_numberOfVisiblePoints we need to also load the points from after this page + addFromNextPage(indexInPage, chunkForStep); + } + } else { + // We are somewhere in the middle of the graph. + fromIdx = indexInPage - AG_numberOfVisiblePoints / 2; + toIdx = indexInPage + AG_numberOfVisiblePoints / 2; + AG_currentIndex = toIdx; + prepareDisplayData(fromIdx, toIdx, AG_allPoints, AG_time); + } + } + nextData = []; + AG_isLoadStarted = false; + isNextDataLoaded = false; + isNextTimeDataLoaded = false; +} + +/* + * Add all required data to AG_displayedPoints and AG_displayedTimes in order to center + * around indexInPage, if some of the required data is on the previous page. + */ +function addFromPreviousPage(indexInPage, currentPage) { + + const previousPageUrl = readDataPageURL(baseDataURLS[0], (currentPage - 1) * dataPageSize, currentPage * dataPageSize, tsStates[0], tsModes[0]); + let previousData = parseData(HLPR_readJSONfromFile(previousPageUrl), 0); + previousData = getDisplayedChannels(previousData, 0).slice(0); + const previousTimeData = HLPR_readJSONfromFile(timeSetUrls[0][currentPage - 1]); + // Compute which slices we would need from the 'full' two-pages data. 
+ // We only need the difference so to center indexInPage at AG_numberOfVisiblePoints / 2 + let fromIdx, toIdx; + fromIdx = previousData[0].length - (AG_numberOfVisiblePoints / 2 - indexInPage); // This is from where we need to read from previous data + AG_currentIndex = toIdx = AG_numberOfVisiblePoints - (AG_numberOfVisiblePoints / 2 - indexInPage); // This is where we need to add from the current page + // Just generate displayed point and displayed times now + for (let idx = 0; idx < previousData.length; idx++) { + let idy; + let oneLine = []; + // Push data that is from previos slice + for (idy = fromIdx; idy < previousData[0].length; idy++) { + oneLine.push([previousTimeData[idy], AG_addTranslationStep(previousData[idx][idy], idx)]); + } + // Now that that is from our current slice + for (idy = 0; idy < toIdx; idy++) { + oneLine.push([AG_time[idy], AG_addTranslationStep(AG_allPoints[idx][idy], idx)]); + } + AG_displayedPoints.push(oneLine); + } + AG_displayedTimes = previousTimeData.slice(fromIdx).concat(AG_time.slice(0, toIdx)); + previousData = null; +} + +/* + * Add all required data to AG_displayedPoints and AG_displayedTimes in order to center + * around indexInPage, if some of the required data is on the next page. 
+ */ +function addFromNextPage(indexInPage, currentPage) { + + const followingPageUrl = readDataPageURL(baseDataURLS[0], (currentPage + 1) * dataPageSize, (currentPage + 2) * dataPageSize, tsStates[0], tsModes[0]); + let followingData = parseData(HLPR_readJSONfromFile(followingPageUrl), 0); + followingData = getDisplayedChannels(followingData, 0).slice(0); + const followingTimeData = HLPR_readJSONfromFile(timeSetUrls[0][currentPage + 1]); + let fromIdx, toIdx; + fromIdx = indexInPage - (AG_numberOfVisiblePoints / 2); // We need to read starting from here from the current page + AG_currentIndex = toIdx = fromIdx + AG_numberOfVisiblePoints - AG_allPoints[0].length; // We need to read up to here from next page + for (let idx = 0; idx < AG_allPoints.length; idx++) { + let idy; + const oneLine = []; + // Push data that is from this slice + for (idy = fromIdx; idy < AG_allPoints[0].length; idy++) { + oneLine.push([AG_time[idy], AG_addTranslationStep(AG_allPoints[idx][idy], idx)]); + } + // Now that that is from next slice + for (idy = 0; idy < toIdx; idy++) { + oneLine.push([followingTimeData[idy], AG_addTranslationStep(followingData[idx][idy], idx)]); + } + AG_displayedPoints.push(oneLine); + } + AG_displayedTimes = AG_time.slice(fromIdx).concat(followingTimeData.slice(0, toIdx)); + // Since next page is already loaded, that becomes the current page + AG_allPoints = followingData; + AG_time = followingTimeData; + totalPassedData = (currentPage + 1) * dataPageSize; + currentDataFileIndex = currentPage + 1; + isNextDataLoaded = true; + isNextTimeDataLoaded = true; +} + +/* + * Just re-populate whole displayedPoints and displayedTimes given a start and end index. 
+ */ +function prepareDisplayData(fromIdx, toIdx, pointsArray, timeArray) { + + for (let idx = 0; idx < pointsArray.length; idx++) { + let oneLine = []; + for (let idy = fromIdx; idy < toIdx; idy++) { + oneLine.push([timeArray[idy], AG_addTranslationStep(pointsArray[idx][idy], idx)]); + } + AG_displayedPoints.push(oneLine); + } + AG_displayedTimes = timeArray.slice(fromIdx, toIdx) +} + +/* + * Read the next data file asyncronously. Also get the corresponding time data file. + */ +function loadNextDataFile() { + AG_isLoadStarted = true; + const nx_idx = getNextDataFileIndex(); + cachedFileIndex = nx_idx; + AG_readFileDataAsynchronous(nrOfPagesSet, noOfChannelsPerSet, nx_idx, maxChannelLength, 0); + readTimeData(nx_idx, true); +} + +function changeCurrentDataFile() { + if (!isNextDataLoaded || !isNextTimeDataLoaded) { + return; + } + + if (cachedFileIndex !== getNextDataFileIndex()) { + AG_isLoadStarted = false; + isNextDataLoaded = false; + isNextTimeDataLoaded = false; + nextData = []; + nextTimeData = []; + return; + } + + const speed = _AG_get_speed(100); + const longestChannelLength = AG_allPoints[longestChannelIndex].length; + + if (speed > 0) { + totalPassedData = totalPassedData + longestChannelLength; + if (longestChannelLength < AG_currentIndex) { + AG_currentIndex = -(longestChannelLength - AG_currentIndex); + } else { + AG_currentIndex = 0; + } + } else if (speed < 0) { + totalPassedData = totalPassedData - longestChannelLength; + if (totalPassedData < 0) { + totalPassedData = 0; + } + } else { + return; + } + + AG_allPoints = nextData.slice(0); + nextData = []; + AG_time = nextTimeData.slice(0); + nextTimeData = []; + currentDataFileIndex = getNextDataFileIndex(); + AG_isLoadStarted = false; + isNextDataLoaded = false; + isNextTimeDataLoaded = false; + + if (speed < 0) { + AG_currentIndex = longestChannelLength + AG_currentIndex; + } +} + +function shouldLoadNextDataFile() { + if (!AG_isLoadStarted && maxDataFileIndex > 0) { + const nextFileIndex = 
getNextDataFileIndex(); + const speed = _AG_get_speed(1); // Assume left to right pass of data + if (currentDataFileIndex !== nextFileIndex) { + if ((speed > 0) && (maxChannelLength - AG_currentIndex < threshold * AG_numberOfVisiblePoints)) { + return true; + } + if ((speed < 0) && (AG_currentIndex - AG_numberOfVisiblePoints < threshold * AG_numberOfVisiblePoints)) { + return true; + } + } + } + return false; +} + +/* + * In case of multiple arrays find out which has the most data files that need + * to be loaded. + */ +function setMaxDataFileIndex(nrOfPagesPerArray) { + let max_ln = 0; + for (let i = 0; i < nrOfPagesPerArray.length; i++) { + if (nrOfPagesPerArray[i] > max_ln) { + max_ln = nrOfPagesPerArray[i]; + } + } + maxDataFileIndex = max_ln - 1; +} + +/* + * Return the index of the next data file that should be loaded. + */ +function getNextDataFileIndex() { + let nextIndex; + const speed = _AG_get_speed(100); + if (speed > 0) { + nextIndex = currentDataFileIndex + 1; + if (nextIndex >= maxDataFileIndex) { + return maxDataFileIndex; + } + } else { + nextIndex = currentDataFileIndex - 1; + if (nextIndex <= 0) { + return 0; + } + } + return nextIndex; +} + +function AG_readFileDataAsynchronous(nrOfPages, noOfChannelsPerSet, currentFileIndex, maxChannelLength, dataSetIndex) { + if (dataSetIndex >= nrOfPages.length) { + isNextDataLoaded = true; + // keep data only for the selected channels + const r = _AG_getSelectedDataAndLongestChannelIndex(nextData); + longestChannelIndex = r.longestChannelIndex; + nextData = r.selectedData; //todo: occasional shape mismatch 3d <- 2d + return; + } + if (nrOfPages[dataSetIndex] - 1 < currentFileIndex && AG_isLoadStarted) { + // todo: assumed that this is computing a padding for smaller signals. 
check if this is really the purpose of this + let j; + let padding = []; + let oneChannel = []; + for (j = 0; j < maxChannelLength; j++) { + oneChannel.push(0); + } + for (j = 0; j < noOfChannelsPerSet[dataSetIndex]; j++) { + padding.push(oneChannel); + } + nextData.push(padding); + + AG_readFileDataAsynchronous(nrOfPages, noOfChannelsPerSet, currentFileIndex, maxChannelLength, dataSetIndex + 1); + } else { + doAjaxCall({ + url: readDataPageURL(baseDataURLS[dataSetIndex], currentFileIndex * dataPageSize, (currentFileIndex + 1) * dataPageSize, tsStates[dataSetIndex], tsModes[dataSetIndex]), + success: function (data) { + if (AG_isLoadStarted) { + data = $.parseJSON(data); + const result = parseData(data, dataSetIndex); + nextData.push(result); + + AG_readFileDataAsynchronous(nrOfPages, noOfChannelsPerSet, currentFileIndex, maxChannelLength, dataSetIndex + 1); + } + } + }); + } +} + +/* + * Data is received from the HLPR_parseJSON as a 500/74 array. We need to transform it + * into an 74/500 one and in the transformation also replace all NaN values. 
+ */ +function parseData(dataArray, dataSetIndex) { + + let result = []; + for (let i = 0; i < noOfChannelsPerSet[dataSetIndex]; i++) { + result.push([]); + } + for (let j = 0; j < dataArray.length; j++) { + for (let k = 0; k < noOfChannelsPerSet[dataSetIndex]; k++) { + let arrElem = dataArray[j][k]; + if (arrElem === 'NaN') { + nanValueFound = true; + arrElem = 0; + } + result[k].push(arrElem); + } + } + return result; +} + +/** + * + * @param fileIndex + * @param asyncRead true only if the file should be read asynchronous + */ +function readTimeData(fileIndex, asyncRead) { + if (timeSetUrls[longestChannelIndex].length <= fileIndex) { + nextTimeData = []; + for (let i = 0; i < maxChannelLength; i++) { + nextTimeData.push(totalPassedData + i); + } + isNextTimeDataLoaded = true; + } else { + if (asyncRead) { + doAjaxCall({ + url: timeSetUrls[longestChannelIndex][fileIndex], + success: function (data) { + nextTimeData = $.parseJSON(data); + isNextTimeDataLoaded = true; + } + }); + } else { + nextTimeData = HLPR_readJSONfromFile(timeSetUrls[longestChannelIndex][fileIndex]); + isNextTimeDataLoaded = true; + } + } +} + +function getArrayFromDataFile(dataFile) { + let fileData = dataFile.replace(/\n/g, " ").replace(/\t/g, " "); + let arrayData = $.trim(fileData).split(" "); + for (let i = 0; i < arrayData.length; i++) { + arrayData[i] = parseFloat(arrayData[i]); + } + return arrayData; +} + +function getDisplayedChannels(listOfAllChannels, offset) { + let selectedData = []; + for (let i = 0; i < displayedChannels.length; i++) { + if (listOfAllChannels[displayedChannels[i] - offset] !== undefined) { + selectedData.push(listOfAllChannels[displayedChannels[i] - offset].slice(0)); + } + } + return selectedData; +} From 607a5f46f120d510f31b06bcb6c35390b9f33f71 Mon Sep 17 00:00:00 2001 From: kimonoki Date: Wed, 1 Aug 2018 15:07:41 +1000 Subject: [PATCH 32/53] TVB-2379 Update animated_graph.js from dualBrainViewer.js --- .../new_dual_brain/scripts/animated_graph.js | 428 
+++---- .../new_dual_brain/scripts/dualBrainViewer.js | 1025 ----------------- 2 files changed, 162 insertions(+), 1291 deletions(-) delete mode 100644 tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/animated_graph.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/animated_graph.js index 50cf15b12..775764102 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/animated_graph.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/animated_graph.js @@ -51,7 +51,7 @@ var AG_computedStep = 50; //The normalization steps for each of the channels, in order to bring them centered near the channel bar var AG_normalizationSteps = []; //If the animation is paused using pause/start button -var AG_isStopped = false; +var AG_isStopped = true; //If animation speed is set at a 0 value var AG_isSpeedZero = false; //the number of points that are shifted/unshift at a moment @@ -99,27 +99,13 @@ var lbl_x_width = 100; var lbl_x_height = 30; var zoom_range = [0.1, 20]; -var AG_defaultXaxis = {zoomRange: zoom_range, labelWidth: lbl_x_width, labelHeight: lbl_x_height}; -var AG_defaultYaxis = {show: false, zoomRange: zoom_range, labelWidth: 200, labelHeight: 30}; // the index of the cached file (the file that was loaded asynchronous) var cachedFileIndex = 0; -var labelX = ""; -var chartTitle = ""; //The displayed labels for the graph var chanDisplayLabels = []; // setup plot var AG_options = { - series: { - shadowSize: 0, - color: 'blue' - }, // drawing is faster without shadows - lines: { - lineWidth: 1, - show: true - }, - yaxis: AG_defaultYaxis, - xaxis: AG_defaultXaxis, grid: { backgroundColor: 'white', hoverable: true, @@ -137,9 +123,6 @@ var AG_options = { }, legend: { show: false - }, - hooks: { - processRawData: [processRawDataHook] } }; @@ -176,27 +159,16 @@ var 
AG_regionSelector = null; // State mode selector. Used as a global only in dual view var AG_modeSelector = null; -function resizeToFillParent() { - const canvas = $('#EEGcanvasDiv'); - let container, width, height; - if (!isSmallPreview) { - // Just use parent section width and height. For width remove some space for the labels to avoid scrolls - // For height we have the toolbar there. Using 100% does not seem to work properly with FLOT. - container = canvas.parent(); - width = container.width() - 40; - height = container.height() - 80; - } else { - container = $('body'); - width = container.width() - 40; - height = container.height() - 20; - } - canvas.width(width).height(height); -} +// GID for the D3 viewer +var filterGid = null; + +//timeseries viewer +var ts = null; window.onresize = function () { resizeToFillParent(); - redrawPlot(plot.getData()); + // redrawPlot(plot.getData()); }; /** @@ -211,11 +183,7 @@ function AG_startAnimatedChart(ag_settings) { drawSliderForAnimationSpeed(); _AG_init_selection(ag_settings.measurePointsSelectionGIDs); - bindHoverEvent(); - initializeCanvasEvents(); - if (!ag_settings.extended_view) { - bindZoomEvent(); - } + } function AG_startAnimatedChartPreview(ag_settings) { @@ -267,6 +235,7 @@ function _AG_initGlobals(ag_settings) { totalTimeLength = ag_settings.totalLength; nanValueFound = ag_settings.nan_value_found; AG_computedStep = ag_settings.translationStep; + } /** @@ -286,7 +255,7 @@ function _AG_initPaginationState(number_of_visible_points) { * @private */ function _AG_preStart() { - resizeToFillParent(); + // resizeToFillParent(); } /** @@ -300,6 +269,8 @@ function _AG_init_selection(filterGids) { let i; let selectors = []; + filterGid = filterGids; + /** * Returns the selected channel indices as interpreted by AG_submitableSelectedChannels * ( starting at 0 and ending at len(timeseries_0_channels) + ... 
+ len(timeseries_final_channels) ) @@ -375,53 +346,8 @@ function _AG_get_speed(defaultSpeed) { return speed; } -/* - * Create FLOT specific options dictionary for the y axis, with correct labels and positioning for - * all channels. Then store these values in 'AG_homeViewYValues' so they can be used in case of a - * 'Home' action in a series of zoom events. - */ -function AG_createYAxisDictionary(nr_channels) { - let ticks, yaxis_dict, increment; - - if (AG_translationStep > 0) { - ticks = []; - const step = AG_computedStep * AG_translationStep; - for (let i = 0; i < nr_channels; i++) { - ticks.push([i * step, chanDisplayLabels[displayedChannels[i]]]); - } - yaxis_dict = { - min: -step, - max: (nr_channels + 1) * step, - ticks: ticks, - zoomRange: [0.1, 20] - }; - increment = nr_channels * step / numberOfPointsForVerticalLine; - if (increment === 0) throw "infinite loop"; - for (let k = -step; k < (nr_channels + 1) * step; k += increment) { - followingLine.push([0, k]); - } - } else { - ticks = [0, 'allChannels']; - yaxis_dict = { - min: -AG_computedStep / 2, - max: AG_computedStep / 2, - ticks: ticks, - zoomRange: [0.1, 20] - }; - increment = AG_computedStep / numberOfPointsForVerticalLine; - if (increment === 0) throw "infinite loop"; - for (let kk = -AG_computedStep / 2; kk < AG_computedStep / 2; kk += increment) { - followingLine.push([0, kk]); - } - } - AG_options.yaxis = yaxis_dict; - AG_homeViewYValues = [yaxis_dict.min, yaxis_dict.max]; - AG_defaultYaxis = yaxis_dict; -} - function refreshChannels() { submitSelectedChannels(false); - drawGraph(false, noOfShiftedPoints); } function _AG_changeMode(tsIndex, val) { @@ -434,6 +360,12 @@ function _AG_changeStateVariable(tsIndex, val) { refreshChannels(); } + +//this function is used in virtualBrain.js keep it for now +function drawGraph() { + +} + function _AG_getSelectedDataAndLongestChannelIndex(data) { let offset = 0; let selectedData = []; @@ -458,7 +390,6 @@ function 
_AG_getSelectedDataAndLongestChannelIndex(data) { * exist then just use the previous 'displayedChannels' (or default in case of first run). */ function submitSelectedChannels(isEndOfData) { - AG_currentIndex = AG_numberOfVisiblePoints; if (AG_submitableSelectedChannels.length === 0) { AG_submitableSelectedChannels = displayedChannels.slice(); @@ -507,27 +438,74 @@ function submitSelectedChannels(isEndOfData) { displayMessage('The given data contains some NaN values. All the NaN values were replaced by zero.', 'warningMessage'); } - // draw the first 'AG_numberOfVisiblePoints' points - redrawCurrentView(); - if (!isSmallPreview) { - AG_translationStep = $('#ctrl-input-spacing').slider("option", "value") / 4; - AG_scaling = $("#ctrl-input-scale").slider("value"); - } else { - AG_translationStep = 1; + + //The shape we use for time series now only uses 1D + var dataShape = [AG_time.length, 1, AG_submitableSelectedChannels.length, 1]; + var selectedLabels = [] + for (let i = 0; i < AG_submitableSelectedChannels.length; i++) { + selectedLabels.push([chanDisplayLabels[displayedChannels[i]]]); } - AG_createYAxisDictionary(AG_noOfLines); - redrawPlot([]); - resetToDefaultView(); - if (AG_isStopped) { - AG_isStopped = false; - drawGraph(false, noOfShiftedPoints); - AG_isStopped = true; + //use d3 to create 2D plot + ts = tv.plot.time_series(); + ts.baseURL(baseDataURLS[0]).preview(false).mode(0).state_var(0); + ts.shape(dataShape).t0(AG_time[1] / 2).dt(AG_time[1]); + ts.labels(selectedLabels); + ts.channels(AG_submitableSelectedChannels); + + + resizeToFillParent(ts); + $('#time-series-viewer').empty(); + ts(d3.select("#time-series-viewer")); + tsView = ts; + + VS_selectedchannels=tsView.channels(); + + // This is arbitrarily set to a value. 
To be consistent with tsview we rescale relative to this value + _initial_magic_fcs_amp_scl = tsView.magic_fcs_amp_scl; + + $("#ctrl-input-scale").slider({ + value: 50, min: 0, max: 100, + slide: function (event, target) { + _updateScalingFromSlider(target.value); + } + }); + +} + + +function resizeToFillParent(ts) { + var container, width, height; + + container = $('#eegSectionId').parent(); + width = container.width(); + + //minus toolbar's height + height = container.height() - 60; + + ts.w(width).h(height); + +} + +function _updateScalingFromSlider(value) { + if (value == null) { + value = $("#ctrl-input-scale").slider("value"); + } + var expo_scale = (value - 50) / 50; // [1 .. -1] + var scale = Math.pow(10, expo_scale * 4); // [1000..-1000] + tsView.magic_fcs_amp_scl = _initial_magic_fcs_amp_scl * scale; + tsView.prepare_data(); + tsView.render_focus(); + + if (scale >= 1) { + $("#display-scale").html("1 * " + scale.toFixed(2)); } else { - drawGraph(false, noOfShiftedPoints); + $("#display-scale").html("1 / " + (1 / scale).toFixed(2)); } + } + /** * This method decides if we are at the beginning or end of the graph, in which case we only need * to move the vertical line, or in between, where vertical line is not moving, instead arrays are shifted. @@ -571,169 +549,6 @@ function generateChannelColors(nr_of_channels) { } } -/* - * Get y-axis labels and update colors to correspond to each channel - */ -function setLabelColors() { - const labels = $('.flot-y-axis .tickLabel'); - for (let i = 0; i < labels.length; i++) { - const chan_idx = chanDisplayLabels.indexOf(labels[i].firstChild.textContent); - if (chan_idx >= 0) { - labels[i].style.color = AG_reversedChannelColorsDict[displayedChannels.indexOf(chan_idx)]; - labels[i].style.left = 80 + (i % 2) * 40 + 'px'; - } - } -} - -/* - * This method draw the actual plot. The 'executeShift' parameter decides if a shift is - * to be done, or just use the previous data points. 
'shiftNo' decides the number of points - * that will be shifted. - */ -function drawGraph(executeShift, shiftNo) { - let i; - noOfShiftedPoints = shiftNo; - if (isEndOfData) { - isEndOfData = false; - submitSelectedChannels(true); - } - if (t !== null && t !== undefined) { - clearTimeout(t); - } - if (AG_isStopped) { - return; - } - if (shouldLoadNextDataFile()) { - loadNextDataFile(); - } - - let direction = 1; - if (_AG_get_speed(1) < 0) { - direction = -1; - } - - let moveLine = shouldMoveLine(direction, noOfShiftedPoints); - //Increment line position in case we need to move the line - if (moveLine && executeShift && !AG_isSpeedZero) { - currentLinePosition = currentLinePosition + noOfShiftedPoints * direction; - } - - if (currentLinePosition >= AG_numberOfVisiblePoints) { - isEndOfData = true; - } - - if (executeShift && !AG_isSpeedZero && !moveLine) { - let count = 0; - if (direction === -1) { - if (currentDataFileIndex > 0 || AG_currentIndex > AG_numberOfVisiblePoints) { - count = 0; - while (count < noOfShiftedPoints && AG_currentIndex - count > AG_numberOfVisiblePoints) { - count = count + 1; - AG_displayedTimes.unshift(AG_time[AG_currentIndex - AG_numberOfVisiblePoints - count]); - for (i = 0; i < AG_displayedPoints.length; i++) { - AG_displayedPoints[i].unshift( - [AG_time[AG_currentIndex - AG_numberOfVisiblePoints - count], - AG_addTranslationStep(AG_allPoints[i][AG_currentIndex - AG_numberOfVisiblePoints - count], i) - ]); - AG_displayedPoints[i].pop(); - } - AG_displayedTimes.pop(); - } - - if (AG_currentIndex - count > AG_numberOfVisiblePoints) { - AG_currentIndex = AG_currentIndex - count; - } else { - AG_currentIndex = Math.min(AG_currentIndex, AG_numberOfVisiblePoints); - if (currentDataFileIndex > 0 && isNextDataLoaded) { - changeCurrentDataFile(); - } - } - } - } else { - if (totalTimeLength > AG_currentIndex + totalPassedData) { - // here we add new 'noOfShiftedPoints' points to the chart and remove the first 'noOfShiftedPoints' visible points - 
count = 0; - while (count < noOfShiftedPoints && totalTimeLength > AG_currentIndex + count) { - AG_displayedTimes.push(AG_time[AG_currentIndex + count]); - for (i = 0; i < AG_displayedPoints.length; i++) { - AG_displayedPoints[i].push( - [AG_time[AG_currentIndex + count], - AG_addTranslationStep(AG_allPoints[i][AG_currentIndex + count], i) - ]); - AG_displayedPoints[i].shift(); - } - AG_displayedTimes.shift(); - count = count + 1; - } - - if (AG_currentIndex + count < AG_allPoints[longestChannelIndex].length) { - AG_currentIndex = AG_currentIndex + count; - } else { - AG_currentIndex = Math.max(AG_currentIndex, AG_allPoints[longestChannelIndex].length); - if (maxDataFileIndex > 0 && isNextDataLoaded) { - changeCurrentDataFile(); - } - } - } - } - } - if (!AG_isSpeedZero) { - for (i = 0; i < followingLine.length; i++) { - followingLine[i][0] = AG_displayedTimes[currentLinePosition]; - } - let preparedData = []; - for (let j = 0; j < AG_displayedPoints.length; j++) { - preparedData.push({data: AG_displayedPoints[j].slice(0), color: AG_reversedChannelColorsDict[j]}); - } - preparedData.push({data: followingLine, color: 'rgb(255, 0, 0)'}); - plot.setData(preparedData); - plot.setupGrid(); - plot.draw(); - setLabelColors(); - } - if (!isDoubleView) { - t = setTimeout("drawGraph(true, noOfShiftedPoints)", getTimeoutBasedOnSpeed()); - } -} - -/* - * Do a redraw of the plot. Be sure to keep the resizable margin elements as the plot method seems to destroy them. 
- */ -function redrawPlot(data) { - const target = $('#EEGcanvasDiv'); - const resizerChildren = target.children('.ui-resizable-handle'); - for (let i = 0; i < resizerChildren.length; i++) { - target[0].removeChild(resizerChildren[i]); - } - plot = $.plot(target, data, $.extend(true, {}, AG_options)); - for (let j = 0; j < resizerChildren.length; j++) { - target[0].appendChild(resizerChildren[j]); - } - setLabelColors(); -} - - -/** - * This hook will be called before Flot copies and normalizes the raw data for the given - * series. If the function fills in datapoints.points with normalized - * points and sets datapoints.pointsize to the size of the points, - * Flot will skip the copying/normalization step for this series. - */ -function processRawDataHook(plot, series, data, datapoints) { - datapoints.format = [ - {x: true, number: true, required: true}, - {y: true, number: true, required: true} - ]; - datapoints.pointsize = 2; - - for (let i = 0; i < data.length; i++) { - datapoints.points.push(data[i][0]); - datapoints.points.push(data[i][1]); - } - - series.xaxis.used = series.yaxis.used = true; -} - /** * Translate the given value. 
@@ -775,7 +590,6 @@ function loadEEGChartFromTimeStep(step) { const dataPage = [parseData(HLPR_readJSONfromFile(dataUrl), 0)]; AG_allPoints = getDisplayedChannels(dataPage[0], 0).slice(0); AG_time = HLPR_readJSONfromFile(timeSetUrls[0][chunkForStep]).slice(0); - totalPassedData = chunkForStep * dataPageSize; // New passed data will be all data until the start of this page currentDataFileIndex = chunkForStep; AG_displayedPoints = []; @@ -1127,3 +941,85 @@ function getDisplayedChannels(listOfAllChannels, offset) { } return selectedData; } + + +//------------------------------------------------START ZOOM RELATED CODE-------------------------------------------------------- +function stopAnimation() { + AG_isStopped = !AG_isStopped; + var btn = $("#ctrl-action-pause"); + if (AG_isStopped) { + btn.html("Start"); + btn.attr("class", "action action-controller-launch"); + } else { + btn.html("Pause"); + btn.attr("class", "action action-controller-pause"); + } + +} + + +//------------------------------------------------START SCALE RELATED CODE-------------------------------------------------------- + + +function drawSliderForScale() { + function _onchange() { + /** When scaling, we need to redraw the graph and update the HTML with the new values. 
+ */ + var spacing = $("#ctrl-input-spacing").slider("value") / 4; + var scale = $("#ctrl-input-scale").slider("value"); + + if (spacing >= 0 && AG_currentIndex <= AG_numberOfVisiblePoints) { + AG_currentIndex = AG_numberOfVisiblePoints; + } else if (spacing < 0 && (AG_allPoints[0].length - AG_currentIndex) < AG_numberOfVisiblePoints) { + AG_currentIndex = AG_allPoints[0].length; + } + AG_displayedPoints = []; + for (var i = 0; i < AG_noOfLines; i++) { + AG_displayedPoints.push([]); + } + _updateScaleFactor(scale); + } + + $("#ctrl-input-scale").slider({value: 1, min: 1, max: 32, change: _onchange}); + + $("#display-scale").html("" + AG_scaling); +} + +function _updateScaleFactor(scale) { + AG_scaling = scale; + $("#display-scale").html("" + AG_scaling); +} + +//------------------------------------------------END SCALE RELATED CODE-------------------------------------------------------- + +//------------------------------------------------START SPEED RELATED CODE-------------------------------------------------------- + +function drawSliderForAnimationSpeed() { + $("#ctrl-input-speed").slider({ + orientation: 'horizontal', + value: 3, + min: -50, + max: 50, + change: function (event, ui) { + updateSpeedFactor(); + } + }); +} + + +function updateSpeedFactor() { + var speed = $("#ctrl-input-speed").slider("option", "value"); + $('#display-speed').html('' + speed); + AG_isSpeedZero = (speed == 0); +} + +//------------------------------------------------END SPEED RELATED CODE-------------------------------------------------------- +//------------------------------------------------START TIME SERIES TIME SELECTION RELATED CODE-------------------------------------------------------- + + function intervalSet(){ + var start=$('#SetIntervalStart').val(); + var end=$('#SetIntervalEnd').val(); + if(start. 
- * - **/ - -/* globals doAjaxCall, readDataPageURL, HLPR_readJSONfromFile */ - -// //it contains all the points that have to be/have been displayed (it contains all the points from the read file); -// //it is an array of arrays (each array contains the points for a certain line chart) -var AG_allPoints = []; -// it supplies the labels for x axis (time in milliseconds) -var AG_time = []; -//it is used for clearing timing events (the event that calls the drawGraph method after a specified time-interval) -var t = null; -//how many elements will be visible on the screen -//computed on the server -var AG_numberOfVisiblePoints = 0; -//all the points that are visible on the screen at a certain moment; the points are read from the AG_allPoints array -//and are translated with a value equal to [AG_translationStep * (AG_noOfLines - the index of the current line)] -//THE FORM of this matrix is: [ [[t1, a1], [t2, a2], ...], [[t1, b1], [t2, b2], ...], ..., [[t1, n1], [t2, n2], ...]] -// t1, t2, ... - represents time that is visible on the screen at a certain moment; -// a1, a2,... - represents the translated values -var AG_displayedPoints = []; -//All the times values that are displayed at a certain moment. To be used by the vertical time line. -var AG_displayedTimes = []; -//the last element that was displayed on the screen is located at this index; the index refers to AG_allPoints array -var AG_currentIndex = 0; -//this var should be set to the length of the AG_allPoints array -var AG_noOfLines = 0; -// the step used for translating the drawn line charts; we translate the drawn line charts because we don't want them to overlap -// the lines will be translated with AG_translationStep * AG_computedStep -var AG_translationStep = 1; -// a scaling factor for the displayed signal -var AG_scaling = 1; -// this var is computed on the server. It is used for line translation (AG_translationStep * AG_computedStep). 
-var AG_computedStep = 50; -//The normalization steps for each of the channels, in order to bring them centered near the channel bar -var AG_normalizationSteps = []; -//If the animation is paused using pause/start button -var AG_isStopped = true; -//If animation speed is set at a 0 value -var AG_isSpeedZero = false; -//the number of points that are shifted/unshift at a moment -var noOfShiftedPoints = 1; -// List of channels that will be submited on a change of the displayed channels -var AG_submitableSelectedChannels = []; -// contains the indexes of the channels that are displayed -var displayedChannels = []; -// a list of urls pointing to the files from where we should read the time -var timeSetUrls = []; -//a list containing the number of channel in each file specified in 'dataSetUrls' fields -var noOfChannelsPerSet = []; -// the number of points from the longest channel -var maxChannelLength = 0; -// the maximum number of data files from all the submited datatypes -var maxDataFileIndex = 0; -// represents the file index from the dataset that is displayed in the chart -var currentDataFileIndex = 0; -// contains the parsed data for the next file from the dataset -var nextData = []; -// contains the time for the next file from the dataset -var nextTimeData = []; -// true only if the next file from dataset was loaded into memory -var isNextDataLoaded = false; -// true only if the next time data was loaded into memory -var isNextTimeDataLoaded = false; -// true only if the the process of loading a file is started -var AG_isLoadStarted = false; -// this is the number of steps left before updating the next file -var threshold = 10; -// the amount of data that has passed -var totalPassedData = 0; -// the number of channels -var totalNumberOfChannels = 0; -// true only if any of the displayed channels contains NaN values -var nanValueFound = false; -//Channel prefix for each array of data -var channelPrefix = "Channel: "; -// -var totalTimeLength = 0; -//Default values 
for the x and y axis of the plot -//NOTE: do not remove from the axis AG_options 'labelWidth' and 'labelHeight' because -//this will slow down the animation -var lbl_x_width = 100; -var lbl_x_height = 30; -var zoom_range = [0.1, 20]; - - -// the index of the cached file (the file that was loaded asynchronous) -var cachedFileIndex = 0; -//The displayed labels for the graph -var chanDisplayLabels = []; -// setup plot -var AG_options = { - grid: { - backgroundColor: 'white', - hoverable: true, - clickable: true - }, - points: { - show: false, - radius: 0.001 - }, - zoom: { - interactive: false - }, - selection: { - mode: "xy" - }, - legend: { - show: false - } -}; - -var DEFAULT_MAX_CHANNELS = 10; -var plot = null; - -var followingLine = []; -//The required position from which the following vertical time line will start moving with the array -//Expressed as a number from [0, 1], 0 - start from begining, 1 start only at end -var procentualLinePosition = 0.5; -//The actual position in the graph of the following vertical line. Start from -speed to account for the initial translation. -var currentLinePosition = 0; -//The number of points used to display the vertical line. -var numberOfPointsForVerticalLine = 1000; -var isDoubleView = false; - -var AG_homeViewYValues = []; -var AG_homeViewXValues = {zoomRange: zoom_range, labelWidth: lbl_x_width, labelHeight: lbl_x_height}; -//This will be set to true in the launch_viewer method called by burst small previews -var isSmallPreview = false; - -var targetVerticalLinePosition; - -// The base url for calling any methods on a given datatype -var baseDataURLS = []; -var nrOfPagesSet = []; -var dataPageSize = []; -var tsModes = [0, 0, 0]; -var tsStates = [0, 0, 0]; -var longestChannelIndex = 0; - -// region selection component -var AG_regionSelector = null; -// State mode selector. 
Used as a global only in dual view -var AG_modeSelector = null; - - -// GID for the D3 viewer -var filterGid = null; - -//timeseries viewer -var ts = null; - -window.onresize = function () { - resizeToFillParent(); - // redrawPlot(plot.getData()); -}; - -/** - * Animated graph entry point - */ -function AG_startAnimatedChart(ag_settings) { - isSmallPreview = false; - _AG_initGlobals(ag_settings); - _AG_initPaginationState(ag_settings.number_of_visible_points); - _AG_preStart(); - drawSliderForScale(); - drawSliderForAnimationSpeed(); - _AG_init_selection(ag_settings.measurePointsSelectionGIDs); - - -} - -function AG_startAnimatedChartPreview(ag_settings) { - isSmallPreview = true; - AG_isStopped = true; - _AG_initGlobals(ag_settings); - _AG_initPaginationState(ag_settings.number_of_visible_points); - _AG_preStart(); - - // Initialize AG_submitableSelectedChannels - // warning: Assumes channel values are a range - if (AG_submitableSelectedChannels.length === 0) { - // Viewer breaks if this is empty. Fill the first few channels - const defaultSelectionLength = Math.min(totalNumberOfChannels, DEFAULT_MAX_CHANNELS); - for (let i = 0; i < defaultSelectionLength; i++) { - AG_submitableSelectedChannels.push(i); - } - } - - refreshChannels(); -} - -function AG_rePaginate(number_of_visible_points) { - _AG_initPaginationState(number_of_visible_points); - $('#display-page-size').html('' + number_of_visible_points); - refreshChannels(); - if (isDoubleView) { - initActivityData(); - } -} - -/** - * Initialize global state. Part of the AG startup. 
- * @private - */ -function _AG_initGlobals(ag_settings) { - isDoubleView = ag_settings.extended_view; - // dataSetUrls = $.parseJSON(dataSetPaths); - baseDataURLS = ag_settings.baseURLS; - nrOfPagesSet = ag_settings.nrOfPages; - dataPageSize = ag_settings.pageSize; - chanDisplayLabels = ag_settings.channelLabels; - noOfChannelsPerSet = ag_settings.channelsPerSet; - timeSetUrls = ag_settings.timeSetPaths; - maxChannelLength = parseInt(ag_settings.pageSize); - AG_normalizationSteps = ag_settings.normalizedSteps; - setMaxDataFileIndex(nrOfPagesSet); - totalNumberOfChannels = ag_settings.noOfChannels; - totalTimeLength = ag_settings.totalLength; - nanValueFound = ag_settings.nan_value_found; - AG_computedStep = ag_settings.translationStep; - -} - -/** - * Initialize pagination. Part of AG startup. - * @private - */ -function _AG_initPaginationState(number_of_visible_points) { - AG_numberOfVisiblePoints = parseInt(number_of_visible_points); - if (AG_numberOfVisiblePoints > maxChannelLength) { - AG_numberOfVisiblePoints = maxChannelLength; - } - targetVerticalLinePosition = AG_numberOfVisiblePoints * procentualLinePosition; -} - -/** - * Misc common startup logic. Part of AG startup - * @private - */ -function _AG_preStart() { - // resizeToFillParent(); -} - -/** - * Creates a selection component for each time series displayed by this eeg view - * Part of AG startup - * The order of the filterGids determines the order of the selectors - * It must have the same ordering as all other timeseries arrays - * @private - */ -function _AG_init_selection(filterGids) { - let i; - let selectors = []; - - filterGid = filterGids; - - /** - * Returns the selected channel indices as interpreted by AG_submitableSelectedChannels - * ( starting at 0 and ending at len(timeseries_0_channels) + ... 
+ len(timeseries_final_channels) ) - */ - function getSelectedChannelsAsGlobalIndices() { - let all_selected = []; - let offset = 0; - - for (let i = 0; i < selectors.length; i++) { - const selector = selectors[i]; - const selected_in_current = selector.val(); - - for (let j = 0; j < selected_in_current.length; j++) { - all_selected.push(offset + parseInt(selected_in_current[j], 10)); - } - offset += selector._allValues.length; - } - return all_selected; - } - - // init selectors - let selectorId, selector; - - for (i = 0; i < filterGids.length; i++) { - selectorId = "#channelSelector" + i; - selector = TVBUI.regionSelector(selectorId, {filterGid: filterGids[i]}); - selector.change(function (current_selection) { - AG_submitableSelectedChannels = getSelectedChannelsAsGlobalIndices(); - refreshChannels(); - }); - selectors.push(selector); - } - // the first selector is special. we select by default some channels in it and in case of a dual view - // his selection is synchronized with the brain - AG_regionSelector = selectors[0]; - - // Initialize AG_submitableSelectedChannels - AG_submitableSelectedChannels = getSelectedChannelsAsGlobalIndices(); - - if (AG_submitableSelectedChannels.length === 0) { - // Viewer breaks if this is empty. Fill the first few channels - const defaultSelectionLength = Math.min(totalNumberOfChannels, DEFAULT_MAX_CHANNELS); - // we take the values form the dom, a range(defaultSelectionLength) is not a valid selection if there are multiple time series - AG_submitableSelectedChannels = AG_regionSelector._allValues.slice(0, defaultSelectionLength); - AG_regionSelector.val(AG_submitableSelectedChannels); - } - - // Init the mode selection components. 
Assumes that there are part of the selector dom - let modeSelectors = []; - for (i = 0; i < filterGids.length; i++) { - selectorId = "#channelSelector" + i; - selector = TVBUI.modeAndStateSelector(selectorId, i); - selector.modeChanged(_AG_changeMode); - selector.stateVariableChanged(_AG_changeStateVariable); - modeSelectors.push(selector); - } - // The dual view needs to subscribe to this selector; so we save it like AG_regionSelector - AG_modeSelector = modeSelectors[0]; - - refreshChannels(); -} - -/** - * Read speed from the dom - * @param defaultSpeed default speed when there is no speed slider - * @private - */ -function _AG_get_speed(defaultSpeed) { - let speed = defaultSpeed; - if (!isSmallPreview && !isDoubleView) { - speed = $("#ctrl-input-speed").slider("value"); - } - return speed; -} - -function refreshChannels() { - submitSelectedChannels(false); -} - -function _AG_changeMode(tsIndex, val) { - tsModes[tsIndex] = parseInt(val); - refreshChannels(); -} - -function _AG_changeStateVariable(tsIndex, val) { - tsStates[tsIndex] = parseInt(val); - refreshChannels(); -} - - -//this function is used in virtualBrain.js keep it for now -function drawGraph() { - -} - -function _AG_getSelectedDataAndLongestChannelIndex(data) { - let offset = 0; - let selectedData = []; - let channelLengths = []; - - for (let i = 0; i < data.length; i++) { - const selectedChannels = getDisplayedChannels(data[i], offset); - offset += data[i].length; - if (selectedChannels.length > 0) { - channelLengths.push(selectedChannels[0].length); - } else { - channelLengths.push(-1); - } - selectedData = selectedData.concat(selectedChannels); - } - const longestChannelIndex = channelLengths.indexOf(Math.max.apply(Math, channelLengths)); - return {selectedData: selectedData, longestChannelIndex: longestChannelIndex} -} - -/* - * Get required data for the channels in AG_submitableSelectedChannels. If none - * exist then just use the previous 'displayedChannels' (or default in case of first run). 
- */ -function submitSelectedChannels(isEndOfData) { - AG_currentIndex = AG_numberOfVisiblePoints; - if (AG_submitableSelectedChannels.length === 0) { - AG_submitableSelectedChannels = displayedChannels.slice(); - } - - if (!(isEndOfData && maxDataFileIndex === 0)) { - AG_allPoints = []; - displayedChannels = AG_submitableSelectedChannels.slice(0); - generateChannelColors(displayedChannels.length); - - let results = []; - for (let i = 0; i < nrOfPagesSet.length; i++) { - const dataURL = readDataPageURL(baseDataURLS[i], 0, dataPageSize, tsStates[i], tsModes[i]); - const data = HLPR_readJSONfromFile(dataURL); - results.push(parseData(data, i)); - } - const r = _AG_getSelectedDataAndLongestChannelIndex(results); - AG_allPoints = AG_allPoints.concat(r.selectedData); - longestChannelIndex = r.longestChannelIndex; - - // keep data only for the selected channels - AG_noOfLines = AG_allPoints.length; - } - - AG_displayedPoints = []; - AG_displayedTimes = []; - for (let ii = 0; ii < AG_noOfLines; ii++) { - AG_displayedPoints.push([]); - } - - if (!(isEndOfData && maxDataFileIndex === 0)) { - //read time - readTimeData(0, false); - AG_time = nextTimeData.slice(0); - } - // reset data - nextData = []; - nextTimeData = []; - AG_isLoadStarted = false; - isNextDataLoaded = false; - isNextTimeDataLoaded = false; - currentDataFileIndex = 0; - totalPassedData = 0; - currentLinePosition = 0; - if (nanValueFound) { - displayMessage('The given data contains some NaN values. 
All the NaN values were replaced by zero.', 'warningMessage'); - } - - - //The shape we use for time series now only uses 1D - var dataShape = [AG_time.length, 1, AG_submitableSelectedChannels.length, 1]; - var selectedLabels = [] - for (let i = 0; i < AG_submitableSelectedChannels.length; i++) { - selectedLabels.push([chanDisplayLabels[displayedChannels[i]]]); - } - - //use d3 to create 2D plot - ts = tv.plot.time_series(); - ts.baseURL(baseDataURLS[0]).preview(false).mode(0).state_var(0); - ts.shape(dataShape).t0(AG_time[1] / 2).dt(AG_time[1]); - ts.labels(selectedLabels); - ts.channels(AG_submitableSelectedChannels); - - - resizeToFillParent(ts); - $('#time-series-viewer').empty(); - ts(d3.select("#time-series-viewer")); - tsView = ts; - - VS_selectedchannels=tsView.channels(); - - // This is arbitrarily set to a value. To be consistent with tsview we rescale relative to this value - _initial_magic_fcs_amp_scl = tsView.magic_fcs_amp_scl; - - $("#ctrl-input-scale").slider({ - value: 50, min: 0, max: 100, - slide: function (event, target) { - _updateScalingFromSlider(target.value); - } - }); - -} - - -function resizeToFillParent(ts) { - var container, width, height; - - container = $('#eegSectionId').parent(); - width = container.width(); - - //minus toolbar's height - height = container.height() - 60; - - ts.w(width).h(height); - -} - -function _updateScalingFromSlider(value) { - if (value == null) { - value = $("#ctrl-input-scale").slider("value"); - } - var expo_scale = (value - 50) / 50; // [1 .. 
-1] - var scale = Math.pow(10, expo_scale * 4); // [1000..-1000] - tsView.magic_fcs_amp_scl = _initial_magic_fcs_amp_scl * scale; - tsView.prepare_data(); - tsView.render_focus(); - - if (scale >= 1) { - $("#display-scale").html("1 * " + scale.toFixed(2)); - } else { - $("#display-scale").html("1 / " + (1 / scale).toFixed(2)); - } - -} - - -/** - * This method decides if we are at the beginning or end of the graph, in which case we only need - * to move the vertical line, or in between, where vertical line is not moving, instead arrays are shifted. - */ -function shouldMoveLine(direction, shiftNo) { - shiftNo = shiftNo || 1; - let isEndOfGraph = false; - let isStartOfGraph = false; - if (direction === 1) { - isEndOfGraph = ((totalPassedData + AG_currentIndex + noOfShiftedPoints >= totalTimeLength) && (currentLinePosition < AG_numberOfVisiblePoints + shiftNo)); - isStartOfGraph = (currentLinePosition < targetVerticalLinePosition); - if (AG_displayedTimes[currentLinePosition] > AG_displayedPoints[longestChannelIndex][AG_displayedPoints[longestChannelIndex].length - 1][0]) { - isEndOfGraph = false; - } - } else { - isEndOfGraph = (currentLinePosition > targetVerticalLinePosition); - isStartOfGraph = ((totalPassedData + AG_currentIndex - noOfShiftedPoints < AG_numberOfVisiblePoints) && (currentLinePosition > 0)); - if (AG_displayedTimes[currentLinePosition] <= 0) { - isStartOfGraph = false; - } - } - - return isStartOfGraph || isEndOfGraph; -} - -var isEndOfData = false; -var AG_channelColorsDict = {}; -var AG_reversedChannelColorsDict = {}; - -/* - * Generate different colors for each channel. 
- */ -function generateChannelColors(nr_of_channels) { - AG_channelColorsDict = {}; - AG_reversedChannelColorsDict = {}; - let step = parseInt(255 / nr_of_channels); - for (let i = 0; i < nr_of_channels; i++) { - const color = "rgb(" + 250 * (i % 2) + "," + (200 - i * step) + "," + 220 * ((i + 1) % 2) + ")"; - AG_channelColorsDict[color] = i; - AG_reversedChannelColorsDict[i] = color; - } -} - - -/** - * Translate the given value. - * We use this method to translate the values for the drawn line charts because we don't want them to overlap. - * - * @param value the value that should be translated. - * @param index the number of AG_translationSteps that should be used for translating the given value. - * @return {number} - */ -function AG_addTranslationStep(value, index) { - return value * AG_scaling - AG_normalizationSteps[displayedChannels[index]] + AG_translationStep * AG_computedStep * index; -} - -function getTimeoutBasedOnSpeed() { - const currentAnimationSpeedValue = _AG_get_speed(40); - if (currentAnimationSpeedValue === 0) { - return 300; - } - const timeout = 10 - Math.abs(currentAnimationSpeedValue); - if (timeout === 9) { - return 3000; - } - if (timeout === 8) { - return 2000; - } - if (timeout === 7) { - return 1000; - } - return timeout * 100 + 25; -} - -/* - * Load the data from a given step and center plot around that step. 
- */ -function loadEEGChartFromTimeStep(step) { - // Read all data for the page in which the selected step falls into - const chunkForStep = Math.floor(step / dataPageSize); - const dataUrl = readDataPageURL(baseDataURLS[0], chunkForStep * dataPageSize, (chunkForStep + 1) * dataPageSize, tsStates[0], tsModes[0]); - const dataPage = [parseData(HLPR_readJSONfromFile(dataUrl), 0)]; - AG_allPoints = getDisplayedChannels(dataPage[0], 0).slice(0); - AG_time = HLPR_readJSONfromFile(timeSetUrls[0][chunkForStep]).slice(0); - totalPassedData = chunkForStep * dataPageSize; // New passed data will be all data until the start of this page - currentDataFileIndex = chunkForStep; - AG_displayedPoints = []; - const indexInPage = step % dataPageSize; // This is the index in the current page that step will have - let fromIdx, toIdx; - currentLinePosition = AG_numberOfVisiblePoints / 2; // Assume we are not end or beginning since that will be most of the times - if (indexInPage <= AG_numberOfVisiblePoints / 2) { - if (chunkForStep === 0) { - // We are at the beginning of the graph, line did not reach middle point yet, and we are still displaying the first - // AG_numberOfVisiblePoints values - AG_currentIndex = AG_numberOfVisiblePoints; - currentLinePosition = indexInPage; - prepareDisplayData(0, AG_numberOfVisiblePoints, AG_allPoints, AG_time); - } else { - // We are at an edge case between pages. So in order to have all the - // AG_numberOfVisiblePoints we need to also load the points from before this page - addFromPreviousPage(indexInPage, chunkForStep); - } - } else { - if ((indexInPage >= pageSize - AG_numberOfVisiblePoints / 2) || (nrOfPagesSet[0] === 1 && indexInPage + AG_numberOfVisiblePoints / 2 > AG_time.length)) { - if (chunkForStep >= nrOfPagesSet[0] - 1) { - // We are at the end of the graph. The line is starting to move further right from the middle position. 
We are just - // displaying the last AG_numberOfVisiblePoints from the last page - if (AG_time.length > AG_numberOfVisiblePoints) { - fromIdx = AG_time.length - 1 - AG_numberOfVisiblePoints; - } else { - fromIdx = 0; - } - toIdx = AG_time.length - 1; - AG_currentIndex = toIdx; - currentLinePosition = AG_numberOfVisiblePoints - (AG_time.length - 1 - indexInPage); - prepareDisplayData(fromIdx, toIdx, AG_allPoints, AG_time); - } else { - // We are at an edge case between pages. So in order to have all the - // AG_numberOfVisiblePoints we need to also load the points from after this page - addFromNextPage(indexInPage, chunkForStep); - } - } else { - // We are somewhere in the middle of the graph. - fromIdx = indexInPage - AG_numberOfVisiblePoints / 2; - toIdx = indexInPage + AG_numberOfVisiblePoints / 2; - AG_currentIndex = toIdx; - prepareDisplayData(fromIdx, toIdx, AG_allPoints, AG_time); - } - } - nextData = []; - AG_isLoadStarted = false; - isNextDataLoaded = false; - isNextTimeDataLoaded = false; -} - -/* - * Add all required data to AG_displayedPoints and AG_displayedTimes in order to center - * around indexInPage, if some of the required data is on the previous page. - */ -function addFromPreviousPage(indexInPage, currentPage) { - - const previousPageUrl = readDataPageURL(baseDataURLS[0], (currentPage - 1) * dataPageSize, currentPage * dataPageSize, tsStates[0], tsModes[0]); - let previousData = parseData(HLPR_readJSONfromFile(previousPageUrl), 0); - previousData = getDisplayedChannels(previousData, 0).slice(0); - const previousTimeData = HLPR_readJSONfromFile(timeSetUrls[0][currentPage - 1]); - // Compute which slices we would need from the 'full' two-pages data. 
- // We only need the difference so to center indexInPage at AG_numberOfVisiblePoints / 2 - let fromIdx, toIdx; - fromIdx = previousData[0].length - (AG_numberOfVisiblePoints / 2 - indexInPage); // This is from where we need to read from previous data - AG_currentIndex = toIdx = AG_numberOfVisiblePoints - (AG_numberOfVisiblePoints / 2 - indexInPage); // This is where we need to add from the current page - // Just generate displayed point and displayed times now - for (let idx = 0; idx < previousData.length; idx++) { - let idy; - let oneLine = []; - // Push data that is from previos slice - for (idy = fromIdx; idy < previousData[0].length; idy++) { - oneLine.push([previousTimeData[idy], AG_addTranslationStep(previousData[idx][idy], idx)]); - } - // Now that that is from our current slice - for (idy = 0; idy < toIdx; idy++) { - oneLine.push([AG_time[idy], AG_addTranslationStep(AG_allPoints[idx][idy], idx)]); - } - AG_displayedPoints.push(oneLine); - } - AG_displayedTimes = previousTimeData.slice(fromIdx).concat(AG_time.slice(0, toIdx)); - previousData = null; -} - -/* - * Add all required data to AG_displayedPoints and AG_displayedTimes in order to center - * around indexInPage, if some of the required data is on the next page. 
- */ -function addFromNextPage(indexInPage, currentPage) { - - const followingPageUrl = readDataPageURL(baseDataURLS[0], (currentPage + 1) * dataPageSize, (currentPage + 2) * dataPageSize, tsStates[0], tsModes[0]); - let followingData = parseData(HLPR_readJSONfromFile(followingPageUrl), 0); - followingData = getDisplayedChannels(followingData, 0).slice(0); - const followingTimeData = HLPR_readJSONfromFile(timeSetUrls[0][currentPage + 1]); - let fromIdx, toIdx; - fromIdx = indexInPage - (AG_numberOfVisiblePoints / 2); // We need to read starting from here from the current page - AG_currentIndex = toIdx = fromIdx + AG_numberOfVisiblePoints - AG_allPoints[0].length; // We need to read up to here from next page - for (let idx = 0; idx < AG_allPoints.length; idx++) { - let idy; - const oneLine = []; - // Push data that is from this slice - for (idy = fromIdx; idy < AG_allPoints[0].length; idy++) { - oneLine.push([AG_time[idy], AG_addTranslationStep(AG_allPoints[idx][idy], idx)]); - } - // Now that that is from next slice - for (idy = 0; idy < toIdx; idy++) { - oneLine.push([followingTimeData[idy], AG_addTranslationStep(followingData[idx][idy], idx)]); - } - AG_displayedPoints.push(oneLine); - } - AG_displayedTimes = AG_time.slice(fromIdx).concat(followingTimeData.slice(0, toIdx)); - // Since next page is already loaded, that becomes the current page - AG_allPoints = followingData; - AG_time = followingTimeData; - totalPassedData = (currentPage + 1) * dataPageSize; - currentDataFileIndex = currentPage + 1; - isNextDataLoaded = true; - isNextTimeDataLoaded = true; -} - -/* - * Just re-populate whole displayedPoints and displayedTimes given a start and end index. 
- */ -function prepareDisplayData(fromIdx, toIdx, pointsArray, timeArray) { - - for (let idx = 0; idx < pointsArray.length; idx++) { - let oneLine = []; - for (let idy = fromIdx; idy < toIdx; idy++) { - oneLine.push([timeArray[idy], AG_addTranslationStep(pointsArray[idx][idy], idx)]); - } - AG_displayedPoints.push(oneLine); - } - AG_displayedTimes = timeArray.slice(fromIdx, toIdx) -} - -/* - * Read the next data file asyncronously. Also get the corresponding time data file. - */ -function loadNextDataFile() { - AG_isLoadStarted = true; - const nx_idx = getNextDataFileIndex(); - cachedFileIndex = nx_idx; - AG_readFileDataAsynchronous(nrOfPagesSet, noOfChannelsPerSet, nx_idx, maxChannelLength, 0); - readTimeData(nx_idx, true); -} - -function changeCurrentDataFile() { - if (!isNextDataLoaded || !isNextTimeDataLoaded) { - return; - } - - if (cachedFileIndex !== getNextDataFileIndex()) { - AG_isLoadStarted = false; - isNextDataLoaded = false; - isNextTimeDataLoaded = false; - nextData = []; - nextTimeData = []; - return; - } - - const speed = _AG_get_speed(100); - const longestChannelLength = AG_allPoints[longestChannelIndex].length; - - if (speed > 0) { - totalPassedData = totalPassedData + longestChannelLength; - if (longestChannelLength < AG_currentIndex) { - AG_currentIndex = -(longestChannelLength - AG_currentIndex); - } else { - AG_currentIndex = 0; - } - } else if (speed < 0) { - totalPassedData = totalPassedData - longestChannelLength; - if (totalPassedData < 0) { - totalPassedData = 0; - } - } else { - return; - } - - AG_allPoints = nextData.slice(0); - nextData = []; - AG_time = nextTimeData.slice(0); - nextTimeData = []; - currentDataFileIndex = getNextDataFileIndex(); - AG_isLoadStarted = false; - isNextDataLoaded = false; - isNextTimeDataLoaded = false; - - if (speed < 0) { - AG_currentIndex = longestChannelLength + AG_currentIndex; - } -} - -function shouldLoadNextDataFile() { - if (!AG_isLoadStarted && maxDataFileIndex > 0) { - const nextFileIndex = 
getNextDataFileIndex(); - const speed = _AG_get_speed(1); // Assume left to right pass of data - if (currentDataFileIndex !== nextFileIndex) { - if ((speed > 0) && (maxChannelLength - AG_currentIndex < threshold * AG_numberOfVisiblePoints)) { - return true; - } - if ((speed < 0) && (AG_currentIndex - AG_numberOfVisiblePoints < threshold * AG_numberOfVisiblePoints)) { - return true; - } - } - } - return false; -} - -/* - * In case of multiple arrays find out which has the most data files that need - * to be loaded. - */ -function setMaxDataFileIndex(nrOfPagesPerArray) { - let max_ln = 0; - for (let i = 0; i < nrOfPagesPerArray.length; i++) { - if (nrOfPagesPerArray[i] > max_ln) { - max_ln = nrOfPagesPerArray[i]; - } - } - maxDataFileIndex = max_ln - 1; -} - -/* - * Return the index of the next data file that should be loaded. - */ -function getNextDataFileIndex() { - let nextIndex; - const speed = _AG_get_speed(100); - if (speed > 0) { - nextIndex = currentDataFileIndex + 1; - if (nextIndex >= maxDataFileIndex) { - return maxDataFileIndex; - } - } else { - nextIndex = currentDataFileIndex - 1; - if (nextIndex <= 0) { - return 0; - } - } - return nextIndex; -} - -function AG_readFileDataAsynchronous(nrOfPages, noOfChannelsPerSet, currentFileIndex, maxChannelLength, dataSetIndex) { - if (dataSetIndex >= nrOfPages.length) { - isNextDataLoaded = true; - // keep data only for the selected channels - const r = _AG_getSelectedDataAndLongestChannelIndex(nextData); - longestChannelIndex = r.longestChannelIndex; - nextData = r.selectedData; //todo: occasional shape mismatch 3d <- 2d - return; - } - if (nrOfPages[dataSetIndex] - 1 < currentFileIndex && AG_isLoadStarted) { - // todo: assumed that this is computing a padding for smaller signals. 
check if this is really the purpose of this - let j; - let padding = []; - let oneChannel = []; - for (j = 0; j < maxChannelLength; j++) { - oneChannel.push(0); - } - for (j = 0; j < noOfChannelsPerSet[dataSetIndex]; j++) { - padding.push(oneChannel); - } - nextData.push(padding); - - AG_readFileDataAsynchronous(nrOfPages, noOfChannelsPerSet, currentFileIndex, maxChannelLength, dataSetIndex + 1); - } else { - doAjaxCall({ - url: readDataPageURL(baseDataURLS[dataSetIndex], currentFileIndex * dataPageSize, (currentFileIndex + 1) * dataPageSize, tsStates[dataSetIndex], tsModes[dataSetIndex]), - success: function (data) { - if (AG_isLoadStarted) { - data = $.parseJSON(data); - const result = parseData(data, dataSetIndex); - nextData.push(result); - - AG_readFileDataAsynchronous(nrOfPages, noOfChannelsPerSet, currentFileIndex, maxChannelLength, dataSetIndex + 1); - } - } - }); - } -} - -/* - * Data is received from the HLPR_parseJSON as a 500/74 array. We need to transform it - * into an 74/500 one and in the transformation also replace all NaN values. 
- */ -function parseData(dataArray, dataSetIndex) { - - let result = []; - for (let i = 0; i < noOfChannelsPerSet[dataSetIndex]; i++) { - result.push([]); - } - for (let j = 0; j < dataArray.length; j++) { - for (let k = 0; k < noOfChannelsPerSet[dataSetIndex]; k++) { - let arrElem = dataArray[j][k]; - if (arrElem === 'NaN') { - nanValueFound = true; - arrElem = 0; - } - result[k].push(arrElem); - } - } - return result; -} - -/** - * - * @param fileIndex - * @param asyncRead true only if the file should be read asynchronous - */ -function readTimeData(fileIndex, asyncRead) { - if (timeSetUrls[longestChannelIndex].length <= fileIndex) { - nextTimeData = []; - for (let i = 0; i < maxChannelLength; i++) { - nextTimeData.push(totalPassedData + i); - } - isNextTimeDataLoaded = true; - } else { - if (asyncRead) { - doAjaxCall({ - url: timeSetUrls[longestChannelIndex][fileIndex], - success: function (data) { - nextTimeData = $.parseJSON(data); - isNextTimeDataLoaded = true; - } - }); - } else { - nextTimeData = HLPR_readJSONfromFile(timeSetUrls[longestChannelIndex][fileIndex]); - isNextTimeDataLoaded = true; - } - } -} - -function getArrayFromDataFile(dataFile) { - let fileData = dataFile.replace(/\n/g, " ").replace(/\t/g, " "); - let arrayData = $.trim(fileData).split(" "); - for (let i = 0; i < arrayData.length; i++) { - arrayData[i] = parseFloat(arrayData[i]); - } - return arrayData; -} - -function getDisplayedChannels(listOfAllChannels, offset) { - let selectedData = []; - for (let i = 0; i < displayedChannels.length; i++) { - if (listOfAllChannels[displayedChannels[i] - offset] !== undefined) { - selectedData.push(listOfAllChannels[displayedChannels[i] - offset].slice(0)); - } - } - return selectedData; -} - - -//------------------------------------------------START ZOOM RELATED CODE-------------------------------------------------------- -function stopAnimation() { - AG_isStopped = !AG_isStopped; - var btn = $("#ctrl-action-pause"); - if (AG_isStopped) { - 
btn.html("Start"); - btn.attr("class", "action action-controller-launch"); - } else { - btn.html("Pause"); - btn.attr("class", "action action-controller-pause"); - } - -} - - -//------------------------------------------------START SCALE RELATED CODE-------------------------------------------------------- - - -function drawSliderForScale() { - function _onchange() { - /** When scaling, we need to redraw the graph and update the HTML with the new values. - */ - var spacing = $("#ctrl-input-spacing").slider("value") / 4; - var scale = $("#ctrl-input-scale").slider("value"); - - if (spacing >= 0 && AG_currentIndex <= AG_numberOfVisiblePoints) { - AG_currentIndex = AG_numberOfVisiblePoints; - } else if (spacing < 0 && (AG_allPoints[0].length - AG_currentIndex) < AG_numberOfVisiblePoints) { - AG_currentIndex = AG_allPoints[0].length; - } - AG_displayedPoints = []; - for (var i = 0; i < AG_noOfLines; i++) { - AG_displayedPoints.push([]); - } - _updateScaleFactor(scale); - } - - $("#ctrl-input-scale").slider({value: 1, min: 1, max: 32, change: _onchange}); - - $("#display-scale").html("" + AG_scaling); -} - -function _updateScaleFactor(scale) { - AG_scaling = scale; - $("#display-scale").html("" + AG_scaling); -} - -//------------------------------------------------END SCALE RELATED CODE-------------------------------------------------------- - -//------------------------------------------------START SPEED RELATED CODE-------------------------------------------------------- - -function drawSliderForAnimationSpeed() { - $("#ctrl-input-speed").slider({ - orientation: 'horizontal', - value: 3, - min: -50, - max: 50, - change: function (event, ui) { - updateSpeedFactor(); - } - }); -} - - -function updateSpeedFactor() { - var speed = $("#ctrl-input-speed").slider("option", "value"); - $('#display-speed').html('' + speed); - AG_isSpeedZero = (speed == 0); -} - -//------------------------------------------------END SPEED RELATED 
CODE-------------------------------------------------------- -//------------------------------------------------START TIME SERIES TIME SELECTION RELATED CODE-------------------------------------------------------- - - function intervalSet(){ - var start=$('#SetIntervalStart').val(); - var end=$('#SetIntervalEnd').val(); - if(start Date: Wed, 1 Aug 2018 15:08:15 +1000 Subject: [PATCH 33/53] TVB-2379 Rename animated_graph.js to dualBrainViewer.js --- .../scripts/{animated_graph.js => dualBrainViewer.js} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/{animated_graph.js => dualBrainViewer.js} (100%) diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/animated_graph.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js similarity index 100% rename from tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/animated_graph.js rename to tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js From 965f66ee1ceb89d355f4d3fd72ef46a22da1a441 Mon Sep 17 00:00:00 2001 From: kimonoki Date: Thu, 2 Aug 2018 16:24:29 +1000 Subject: [PATCH 34/53] TVB-2368 Restore the original svg/d3 viewer while keeping the time selection layout in the 3d Update the d3 version for the svg/d3 viewer --- tvb/interfaces/web/static/js/tvbviz.js | 195 ++++++++++++++++-- .../time_series/scripts/timeseriesSVG.js | 2 + .../genshi/visualizers/time_series/view.html | 2 +- 3 files changed, 177 insertions(+), 22 deletions(-) diff --git a/tvb/interfaces/web/static/js/tvbviz.js b/tvb/interfaces/web/static/js/tvbviz.js index 94ee24076..9fb5a3fdf 100644 --- a/tvb/interfaces/web/static/js/tvbviz.js +++ b/tvb/interfaces/web/static/js/tvbviz.js @@ -629,7 +629,6 @@ tv.plot = { if (!f.we_are_setup) { f.render_contexts(); f.add_brushes(); - f.br_fcs_endfn(true); // no_render=true f.we_are_setup = true; } @@ -659,8 
+658,15 @@ tv.plot = { f.pad = {x: (0 ? f.w() : f.h()) * f.p(), y: f.h() * f.p()}; f.ul_ctx_y = {x: f.pad.x, y: f.pad.y}; f.sz_ctx_y = {x: f.pad.x * 0.8, y: f.h() - 3 * f.pad.y - f.pad.y}; - f.ul_ctx_x = {x: f.pad.x, y: 2 * f.pad.y + f.sz_ctx_y.y}; - f.sz_ctx_x = {x: f.w() - 2 * f.pad.x, y: f.pad.y / 2}; + if(f.viewer_type()==='svg'){ + f.ul_ctx_x = {x: 2 * f.pad.x + f.sz_ctx_y.x, y: 2 * f.pad.y + f.sz_ctx_y.y}; + f.sz_ctx_x = {x: f.w() - 3 * f.pad.x- f.sz_ctx_y.x, y: f.pad.y / 2}; + } + else{ + f.ul_ctx_x = {x: f.pad.x, y: 2 * f.pad.y + f.sz_ctx_y.y}; + f.sz_ctx_x = {x: f.w() - 2 * f.pad.x, y: f.pad.y / 2}; + } + f.ul_fcs = {x: f.ul_ctx_x.x, y: f.ul_ctx_y.y}; f.sz_fcs = {x: f.sz_ctx_x.x, y: f.sz_ctx_y.y}; @@ -755,7 +761,6 @@ tv.plot = { // setup groups, scales and axes for context and focus areas f.do_scaffolding = function (rgp) { - // main groups for vertical and horizontal context areas and focus area f.gp_ctx_x = rgp.append("g").attr("transform", "translate(" + f.ul_ctx_x.x + ", " + f.ul_ctx_x.y + ")"); f.gp_ctx_x.append("rect").attr("width", f.sz_ctx_x.x).attr("height", f.sz_ctx_x.y).classed("tv-data-bg", true); @@ -766,7 +771,6 @@ tv.plot = { // the plotted time series in the focus and x ctx area are subject to a clipping region new_clip_path(rgp, "fig-lines-clip").append("rect").attr("width", f.sz_fcs.x).attr("height", f.sz_fcs.y); - // new_clip_path(rgp, "fig-ctx-x-clip").append("rect").attr("width", f.sz_ctx_x.x).attr("height", f.sz_ctx_x.y); // group with clip path applied for the focus lines f.gp_lines = f.gp_fcs.append("g").attr("style", "clip-path: url(#fig-lines-clip)") @@ -795,6 +799,21 @@ tv.plot = { f.gp_ax_fcs_x = f.gp_fcs.append("g").classed("axis", true).call(f.ax_fcs_x); f.gp_ax_fcs_y = f.gp_fcs.append("g").classed("axis", true).call(f.ax_fcs_y); + + if(f.viewer_type()==='svg'){ + // main groups for vertical and horizontal context areas and focus area + f.gp_ctx_y = rgp.append("g").attr("transform", "translate(" + f.ul_ctx_y.x + ", " + 
f.ul_ctx_y.y + ")"); + f.gp_ctx_y.append("rect").attr("width", f.sz_ctx_y.x).attr("height", f.sz_ctx_y.y).classed("tv-data-bg", true); + + new_clip_path(rgp, "fig-ctx-x-clip").append("rect").attr("width", f.sz_ctx_x.x).attr("height", f.sz_ctx_x.y); + + // vertical context groups + f.ax_ctx_y = d3.axisLeft(f.sc_ctx_y); + f.ax_ctx_y.tickFormat(f.signal_tick_labeler); + f.gp_ax_ctx_y = f.gp_ctx_y.append("g").classed("axis", true).call(f.ax_ctx_y); + } + + }; f.prepare_data = function () { @@ -902,8 +921,6 @@ tv.plot = { if (!f.we_are_setup) { - - f.line_paths = g.enter() .append("g") .attr("transform", function (d, i) { @@ -929,10 +946,67 @@ tv.plot = { }; f.render_contexts = function () { + // draw context lines and average + if(f.viewer_type()==='svg'){ + var ts = f.ts(); + + // horizontal context line + var f1 = f.gp_ctx_x.append("g").attr("style", "clip-path: url(#fig-ctx-x-clip)"); + var f2 = f1.selectAll("g").data([f.da_x]).enter(); + var f3 = f2.append("g") + .attr("transform", function () { + return "translate(0, " + (f.sz_ctx_x.y / 2) + ") scale(1, 0.5)"; + }) + .classed("tv-ctx-line", true); + var f4 = f3.append("path") + .attr("d", d3.line() + .x(function (d, i) { + var time_start = f.sc_ctx_x.domain()[0]; + return f.sc_ctx_x((time_start + i + 0.5) * f.da_x_dt); + }) + .y(function (d) { + return d * f.sz_ctx_x.y; + })); + + // error on context line + // TODO the data for this path needs to be re done so that it traces above and below + // the mean line. + var da_x_len = f.da_x.length; + + f.gp_ctx_x.append("g").attr("style", "clip-path: url(#fig-ctx-x-clip)") + .selectAll("g").data([f.da_x.concat(f.da_x.slice().reverse())]) + .enter() + .append("g").attr("transform", "translate(0, " + f.sz_ctx_x.y / 2 + ") scale(1, 0.5)") + .classed("tv-ctx-error", true) + .append("path") + .attr("d", d3.line() + .x(function (d, i) { + var idx = (i < da_x_len) ? 
i : (2 * da_x_len - i); + var time_start = f.sc_ctx_x.domain()[0]; + return f.sc_ctx_x((time_start + idx) * f.da_x_dt); + }) + .y(function (d, i) { + var std = (i < da_x_len) ? f.da_xs[i] : -f.da_xs[2 * da_x_len - i - 1]; + return f.sz_ctx_x.y * (d + std); + })); - // originally used to draw context lines and average + // vertical context lines + f.gp_ctx_y.append("g").selectAll("g").data(f.da_y) + .enter() + .append("g").attr("transform", function (d, i) { + return "translate(0, " + f.sc_ctx_y(i) + ")"; + }) + .classed("tv-ctx-line", true) + .append("path") + .attr("d", d3.line().x(function (d, i) { + return 2 + (f.sz_ctx_y.x - 2) * i / f.sz_ctx_y.x; + }) + .y(function (d) { + return d; + })); + } }; f.scale_focus_stroke = function () { @@ -990,10 +1064,14 @@ tv.plot = { sc = f.sc_fcs_x; x_scaling = scale_brushed.domain()[1] / (dom[1] - dom[0]); sc.domain(dom); - f.sc_ctx_x.domain(dom); f.gp_ax_fcs_x.call(f.ax_fcs_x); - f.gp_ax_ctx_x.call(f.ax_ctx_x); - + + //keep the x context in the same range for the svg viewer + if(f.viewer_type()!='svg'){ + f.sc_ctx_x.domain(dom); + f.gp_ax_ctx_x.call(f.ax_ctx_x); + } + // TODO: This seems to cause problems with negative values and commenting it out does not seem to // cause any additional problems. This could do with some double checking. 
@@ -1034,10 +1112,71 @@ tv.plot = { }; f.br_ctx_y_fn = br_ctx_y_fn; + //move the focus with the x context brush in the svg viewer, different from the fn that evokes from the focus brush + br_ctx_x_move=function(){ + var event_selection = []; + // Different extent when it is: + //1.from the brush of 2D Focus Brush + if (d3.event.selection != null && d3.event.selection[0][0] != null) { + event_selection[0] = d3.event.selection[0][0]; + event_selection[1] = d3.event.selection[1][0]; + } + //2.from the end of focus brush + else if (d3.event.selection == null) { + event_selection = [f.sc_ctx_x.range()[0], f.sc_ctx_x.range()[1]]; - br_ctx_end = function () { + } + + //3.from itself + else { + event_selection = d3.event.selection; + } + //selection is now in coordinates and we have to map it using scales + event_selection = event_selection.map(f.sc_ctx_x.invert, f.sc_ctx_x); + var dom = f.br_ctx_x === null ? f.sc_ctx_x.domain() : event_selection + , sc = f.sc_fcs_x + , x_scaling = f.sc_ctx_x.domain()[1] / (dom[1] - dom[0]); - //get the selected time range + sc.domain(dom); + + + f.gp_ax_fcs_x.call(f.ax_fcs_x); + + // TODO: This seems to cause problems with negative values and commenting it out does not seem to + // cause any additional problems. This could do with some double checking. + f.gp_lines.attr("transform", "translate(" + sc(0) + ", 0) scale(" + x_scaling + ", 1)"); + } + + br_ctx_y_move=function(){ + var event_selection = []; + if (d3.event.selection != null && d3.event.selection[0][0] != null) { + event_selection[0] = d3.event.selection[0][1]; + event_selection[1] = d3.event.selection[1][1]; + } + else if (d3.event.selection == null) { + event_selection = f.sc_ctx_y.range(); + } + else { + event_selection[0] = d3.event.selection[1]; + event_selection[1] = d3.event.selection[0]; + + } + event_selection = event_selection.map(f.sc_ctx_y.invert, f.sc_ctx_y); + var dom = f.br_ctx_y === null ? 
f.sc_ctx_y.domain() : event_selection; + var yscl = f.sz_fcs.y / (dom[1] - dom[0]) / 5; + f.sc_fcs_y.domain(dom).range([f.sz_ctx_y.y, 0]); + + f.gp_ax_fcs_y.call(f.ax_fcs_y); + f.gp_lines.selectAll("g").attr("transform", function (d, i) { + return "translate(0, " + f.sc_fcs_y(i) + ")" + "scale (1, " + yscl + ")" + }).selectAll("path").attr("stroke-width", "" + (3 / yscl)); + f.scale_focus_stroke(); + } + + br_ctx_end = function () { + if(f.viewer_type()==='svg'){} + else{ + //get the selected time range var event_selection_x = []; if (d3.event.selection != null) { event_selection_x[0] = d3.event.selection[0]; @@ -1054,7 +1193,7 @@ tv.plot = { if (d3.event.selection != null) { f.timeselection_update_fn(triggered_by_timeselection) } - + } }; @@ -1112,9 +1251,28 @@ tv.plot = { } + f.br_fcs_brush = function () { + if(f.viewer_type()==='svg'){ + var ex = d3.event.selection; + f.gp_br_ctx_x.call(f.br_ctx_x); + //assign directly because can't use brushSelection to set null brushes' extent + f.gp_br_ctx_x.node().__brush.selection = [[ex[0][0], 0], [ex[1][0], f.sz_ctx_x.y]]; + + f.gp_br_ctx_y.call(f.br_ctx_y); + f.gp_br_ctx_y.node().__brush.selection = [[0, ex[0][1]], [f.sz_ctx_y.x, ex[1][1]]]; + } + }; // create brushes - f.br_ctx_x = d3.brushX().extent([[f.sc_ctx_x.range()[0], 0], [f.sc_ctx_x.range()[1], f.sz_ctx_x.y]]).on("end", br_ctx_end); + if(f.viewer_type()==='svg'){ + // y context brush + f.br_ctx_y = d3.brushY().extent([[0, 0], [f.sz_ctx_y.x, f.sz_ctx_y.y]]).on("brush", br_ctx_y_move); + f.gp_br_ctx_y = f.gp_ctx_y.append("g"); + f.gp_br_ctx_y.append("g").classed("brush", true).call(f.br_ctx_y).selectAll("rect").attr("width", f.sz_ctx_y.x); + } + + f.br_ctx_x = d3.brushX().extent([[f.sc_ctx_x.range()[0], 0], [f.sc_ctx_x.range()[1], f.sz_ctx_x.y]]).on("end", br_ctx_end) + .on("brush", br_ctx_x_move); f.br_fcs = d3.brush().extent([[f.sc_fcs_x.range()[0], 0], [f.sc_fcs_x.range()[1], f.sz_fcs.y]]) .on("end", f.br_fcs_endfn).on("start", f.br_fcs_startfn) .on("brush", 
f.br_fcs_brush); @@ -1125,14 +1283,10 @@ tv.plot = { f.timeselection_title = f.gp_br_ctx_x.append("text").text("Time Selection").attr("y", -10); f.gp_br_ctx_x.classed("brush", true).attr("class", "time-selection-brush").call(f.br_ctx_x).selectAll("rect").attr("height", f.sz_ctx_x.y); - //add main focus brush group f.gp_br_fcs = f.gp_fcs.append("g").classed("brush", true).call(f.br_fcs); - - }; - //functions for the time selection window f.timeselection_update_fn = function (triggered) { //display the selected time range @@ -1165,7 +1319,6 @@ tv.plot = { redrawSelection() }; - //TODO need to fix one additional step brought by any change function redrawSelection() { if (parseInt(timeselection[1]) == parseInt(f.sc_ctx_x.domain()[1])) { @@ -1213,7 +1366,7 @@ tv.plot = { }; f.parameters = ["w", "h", "p", "baseURL", "preview", "labels", "shape", - "t0", "dt", "ts", "ys", "point_limit", "channels", "mode", "state_var"]; + "t0", "dt", "ts", "ys", "point_limit", "channels", "mode", "state_var","viewer_type"]; f.parameters.map(function (name) { f[name] = tv.util.gen_access(f, name); }); diff --git a/tvb/interfaces/web/templates/genshi/visualizers/time_series/scripts/timeseriesSVG.js b/tvb/interfaces/web/templates/genshi/visualizers/time_series/scripts/timeseriesSVG.js index 3eb552d33..9316e70fd 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/time_series/scripts/timeseriesSVG.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/time_series/scripts/timeseriesSVG.js @@ -129,6 +129,8 @@ function initTimeseriesViewer(baseURL, isPreview, dataShape, t0, dt, channelLabe ts.shape(dataShape).t0(t0).dt(dt); ts.labels(_compute_labels_for_current_selection()); ts.channels(TS_SVG_selectedChannels); + //used in tvbviz to render context lines + ts.viewer_type('svg'); // run resizeToFillParent(ts); ts(d3.select("#time-series-viewer")); diff --git a/tvb/interfaces/web/templates/genshi/visualizers/time_series/view.html 
b/tvb/interfaces/web/templates/genshi/visualizers/time_series/view.html index 32c4db0cb..7d7e36805 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/time_series/view.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/time_series/view.html @@ -1,5 +1,5 @@
    - + From 0fa068c5987939260a0cc4268765bbc07c8bec08 Mon Sep 17 00:00:00 2001 From: kimonoki Date: Fri, 3 Aug 2018 14:50:23 +1000 Subject: [PATCH 35/53] TVB-2379 Fix reference to gl header html template in internal html template --- .../visualizers/new_dual_brain/dual_brain_3d_internal_view.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_internal_view.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_internal_view.html index b0b458b19..860eba190 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_internal_view.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/dual_brain_3d_internal_view.html @@ -2,7 +2,7 @@ xmlns:py="http://genshi.edgewall.org/" xmlns:xi="http://www.w3.org/2001/XInclude"> - + From 559757d694f088ef83ede538d10791f9e62edff1 Mon Sep 17 00:00:00 2001 From: kimonoki Date: Sun, 5 Aug 2018 17:36:03 +1000 Subject: [PATCH 36/53] TVB-2372 Remove unnecessary channel number array and use the existing VS_selectedRegions Unify the function name of creating energy sphere buffers Add blockoverlay for the energy retrieving async calls Avoid redraw the brush when triggering from the input field Add viewer type for the new dual brain viewer TVB-2370 Reset the domain to the last position in the time data --- tvb/interfaces/web/static/js/tvbviz.js | 48 +++++++++++-------- .../commons/scripts/virtualBrain.js | 19 +++----- .../new_dual_brain/scripts/dualBrainViewer.js | 3 +- 3 files changed, 36 insertions(+), 34 deletions(-) diff --git a/tvb/interfaces/web/static/js/tvbviz.js b/tvb/interfaces/web/static/js/tvbviz.js index 9fb5a3fdf..752c6983b 100644 --- a/tvb/interfaces/web/static/js/tvbviz.js +++ b/tvb/interfaces/web/static/js/tvbviz.js @@ -40,6 +40,7 @@ var timeselection = []; // identify the initiator of the change of the time selection: brushing or movie timeline var 
triggered_by_timeselection = true; +var triggered_by_changeinput=false; //store the unmapped selection value used to animate the time selection window var selection_x = []; @@ -586,7 +587,8 @@ tv.plot = { f.energy_callback = function (data) { timeselection_energy = data; - changeCubicalMeasurePoints_energy(); + changeSphereMeasurePoints_energy(); + closeBlockerOverlay(); }; f.render = function () { @@ -778,8 +780,8 @@ tv.plot = { // scales for vertical and horizontal context, and the x and y axis of the focus area f.sc_ctx_y = d3.scaleLinear().domain([-1, f.shape()[2]]).range([f.sz_ctx_y.y, 0]); - f.sc_ctx_x = d3.scaleLinear().domain([f.t0(), f.t0() + f.dt() * f.shape()[0]]).range([0, f.sz_ctx_x.x]); - f.sc_fcs_x = d3.scaleLinear().domain([f.t0(), f.t0() + f.dt() * f.shape()[0]]).range([0, f.sz_fcs.x]); + f.sc_ctx_x = d3.scaleLinear().domain([f.t0(), f.t0() + f.dt() * (f.shape()[0]-1)]).range([0, f.sz_ctx_x.x]); + f.sc_fcs_x = d3.scaleLinear().domain([f.t0(), f.t0() + f.dt() * (f.shape()[0]-1)]).range([0, f.sz_fcs.x]); f.sc_fcs_y = d3.scaleLinear().domain([-1, f.shape()[2] + 1]).range([f.sz_fcs.y, 0]); @@ -1042,7 +1044,7 @@ tv.plot = { //2.from the end of focus brush else if (d3.event.selection == null) { event_selection_x = [f.sc_ctx_x.range()[0], f.sc_ctx_x.range()[1]]; - f.dom_x = [f.t0(), f.t0() + f.dt() * f.shape()[0]]; + f.dom_x = [f.t0(), f.t0() + f.dt() * (f.shape()[0]-1)]; } //3.from itself else { @@ -1114,7 +1116,8 @@ tv.plot = { f.br_ctx_y_fn = br_ctx_y_fn; //move the focus with the x context brush in the svg viewer, different from the fn that evokes from the focus brush br_ctx_x_move=function(){ - var event_selection = []; + if(f.viewer_type()==='svg'){ + var event_selection = []; // Different extent when it is: //1.from the brush of 2D Focus Brush if (d3.event.selection != null && d3.event.selection[0][0] != null) { @@ -1145,6 +1148,7 @@ tv.plot = { // TODO: This seems to cause problems with negative values and commenting it out does not seem to // 
cause any additional problems. This could do with some double checking. f.gp_lines.attr("transform", "translate(" + sc(0) + ", 0) scale(" + x_scaling + ", 1)"); + } } br_ctx_y_move=function(){ @@ -1191,7 +1195,7 @@ tv.plot = { //change the actual time point in the slider if (d3.event.selection != null) { - f.timeselection_update_fn(triggered_by_timeselection) + f.timeselection_update_fn() } } @@ -1288,44 +1292,48 @@ tv.plot = { }; //functions for the time selection window - f.timeselection_update_fn = function (triggered) { + f.timeselection_update_fn = function () { //display the selected time range d3.select("#SetIntervalStart").property('value', timeselection[0].toFixed(2)); d3.select("#SetIntervalEnd").property('value', timeselection[1].toFixed(2)); $("#info-interval").html((timeselection[1] - timeselection[0]).toFixed(2) + "ms"); - if (triggered) { + if (triggered_by_timeselection) { timeselection_interval = timeselection[1] - timeselection[0]; timeselection_interval_length = parseInt(timeselection_interval / f.dt()) - 1; //retrieve energy for the whole timeline rather than a slice - var all_slice=f.current_slice(); - all_slice[0].di=f.shape()[1]; - all_slice[0].hi=f.shape()[0]; - all_slice[0].lo=0; + var all_slice = f.current_slice(); + all_slice[0].di = f.shape()[1]; + all_slice[0].hi = f.shape()[0]; + all_slice[0].lo = 0; - //call the energy computation method + //call the energy computation method and block until get the enery data + showBlockerOverlay(50000); tv.util.get_time_selection_energy(f.baseURL(), all_slice, f.energy_callback, f.channels(), f.mode(), f.state_var(), timeselection_interval_length); //update the time in the input tag - d3.select("#TimeNow").property('value', timeselection[0].toFixed(2)); - //update the time in the 3d viewer's time var time_index = parseInt((timeselection[0] - f.t0()) / f.dt()); + triggered_by_changeinput = true; + $('#TimeNow').val(time_index) $('#slider').slider('value', time_index); - 
loadFromTimeStep(parseInt(timeselection[0])); + triggered_by_changeinput = false; + loadFromTimeStep(time_index); } }; //move the time selection window with the slider f.timeselection_move_fn = function () { - redrawSelection() + //only change by movie playing + if (!triggered_by_changeinput) { + redrawSelection() + } }; - //TODO need to fix one additional step brought by any change function redrawSelection() { if (parseInt(timeselection[1]) == parseInt(f.sc_ctx_x.domain()[1])) { f.jump_to_next_time_range() } else if (timeselection[0] >= f.sc_ctx_x.domain()[1] - f.dt()) { - dom = [0, f.t0() + f.dt() * f.shape()[0]]; + dom = [0, f.t0() + f.dt() * (f.shape()[0] - 1)]; f.sc_ctx_x.domain(dom); f.gp_ax_ctx_x.call(f.ax_ctx_x); d3.select(f.gp_br_ctx_x.node()).call(f.br_ctx_x.move, [0, timeselection[1] - timeselection[0]].map(f.sc_ctx_x)); @@ -1355,7 +1363,7 @@ tv.plot = { else if (f.current_slice()[0].hi == current_slice_length) { } else { - dom = [timeselection[1], f.t0() + f.dt() * f.shape()[0]]; + dom = [timeselection[1], f.t0() + f.dt() * (f.shape()[0] - 1)]; f.sc_ctx_x.domain(dom); f.gp_ax_ctx_x.call(f.ax_ctx_x); } diff --git a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js index 8fc142302..b506753f8 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js @@ -159,12 +159,9 @@ var near = 0.1; // index of the currently selected node. 
This is equivalent to CONN_pickedIndex var VS_pickedIndex = -1; -//selected channels used to color the energy spheres -var VS_selectedchannels=[]; var VB_BrainNavigator; -//default time selection time -var timeselection_interval=0; + //indicating we are drawing the energy spheres and applying material colors var isDrawingSpheres = false; /** @@ -583,7 +580,7 @@ function _initSliders() { currentTimeValue = target.value; $('#TimeNow').val(currentTimeValue); }, - change: function (event, ui) { + change: function () { triggered_by_timeselection = false; tsView.timeselection_move_fn(); triggered_by_timeselection = true; @@ -1005,7 +1002,7 @@ function drawBuffers(drawMode, buffersSets, bufferSetsMask, useBlending, cullFac gl.uniform1f(GL_shaderProgram.alphaUniform, 1); // set sphere color green for the selected channels ones and yellow for the others - if (VS_selectedchannels.includes(i)) { + if (VS_selectedRegions.includes(i)) { gl.uniform4f(GL_shaderProgram.materialColor, 0.99, 0.99, 0.0, 1.0); drawBuffer(drawMode, buffersSets[i]); } @@ -1112,8 +1109,7 @@ function tick() { //update energy if(timeselection_interval!=0 && !AG_isStopped){ - - changeCubicalMeasurePoints_energy(); + changeSphereMeasurePoints_energy(); } drawScene(); @@ -1402,11 +1398,10 @@ function readFileData(fileUrl, async, callIdentifier) { /////////////////////////////////////// ~~~~~~~~~~ END DATA RELATED METHOD ~~~~~~~~~~~~~ ////////////////////////////////// /////////////////////////////////////// ~~~~~~~~~~ START ENERGY RELATED METHOD ~~~~~~~~~~~~~ ////////////////////////////////// //init spheres with energy controlling the radius -function changeCubicalMeasurePoints_energy() { - selectedchannels=tsView.channels(); - for (let i = 0; i < selectedchannels.length; i++) { +function changeSphereMeasurePoints_energy() { + for (let i = 0; i < VS_selectedRegions.length; i++) { // generate spheres - const result = HLPR_sphereBufferAtPoint(gl, 
measurePoints[selectedchannels[i]],timeselection_energy[i][currentTimeValue]); + const result = HLPR_sphereBufferAtPoint(gl, measurePoints[VS_selectedRegions[i]],timeselection_energy[i][currentTimeValue]); const bufferVertices = result[0]; const bufferNormals = result[1]; const bufferTriangles = result[2]; diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js index 775764102..d60365682 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js @@ -168,7 +168,6 @@ var ts = null; window.onresize = function () { resizeToFillParent(); - // redrawPlot(plot.getData()); }; /** @@ -452,6 +451,7 @@ function submitSelectedChannels(isEndOfData) { ts.shape(dataShape).t0(AG_time[1] / 2).dt(AG_time[1]); ts.labels(selectedLabels); ts.channels(AG_submitableSelectedChannels); + ts.viewer_type('dualbrain'); resizeToFillParent(ts); @@ -459,7 +459,6 @@ function submitSelectedChannels(isEndOfData) { ts(d3.select("#time-series-viewer")); tsView = ts; - VS_selectedchannels=tsView.channels(); // This is arbitrarily set to a value. 
To be consistent with tsview we rescale relative to this value _initial_magic_fcs_amp_scl = tsView.magic_fcs_amp_scl; From a0de3d2fdc63b13c9f8caece79ab4be2d8f48f77 Mon Sep 17 00:00:00 2001 From: kimonoki Date: Mon, 6 Aug 2018 15:12:30 +1000 Subject: [PATCH 37/53] TVB-2372 Syncing the time displayed in 3D viewer's slider from the time selection in the 2d plot --- .../genshi/visualizers/commons/scripts/virtualBrain.js | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js index b506753f8..0e5258a8a 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js @@ -1125,7 +1125,14 @@ function tick() { lastTime = timeNow; if (timeData.length > 0 && !AG_isStopped) { - document.getElementById("TimeNow").value = toSignificantDigits(timeData[currentTimeValue], 2); + //syncing time with the d3 plot + if (tsView.viewer_type() === 'dualbrain') { + //add dt because the 2d will add one step after the slider changes + document.getElementById("TimeNow").value = (timeselection[0]+tsView.dt()).toFixed(2); + } + else { + document.getElementById("TimeNow").value = toSignificantDigits(timeData[currentTimeValue], 2); + } } let meanFrameTime = 0; for (let i = 0; i < framestime.length; i++) { From 9f22515930c3f4e3a8a7a226863ec867e65eb34e Mon Sep 17 00:00:00 2001 From: kimonoki Date: Tue, 7 Aug 2018 15:28:08 +1000 Subject: [PATCH 38/53] TVB-2372 Fix time display in the slider when time selection is empty --- .../visualizers/commons/scripts/virtualBrain.js | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js index 
0e5258a8a..512bac5dd 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js @@ -581,9 +581,12 @@ function _initSliders() { $('#TimeNow').val(currentTimeValue); }, change: function () { - triggered_by_timeselection = false; + if (timeselection_interval!=0){ + triggered_by_timeselection = false; tsView.timeselection_move_fn(); triggered_by_timeselection = true; + } + }, stop: function (event, target) { sliderSel = false; @@ -1126,9 +1129,9 @@ function tick() { lastTime = timeNow; if (timeData.length > 0 && !AG_isStopped) { //syncing time with the d3 plot - if (tsView.viewer_type() === 'dualbrain') { - //add dt because the 2d will add one step after the slider changes - document.getElementById("TimeNow").value = (timeselection[0]+tsView.dt()).toFixed(2); + //add dt because the 2d will add one step after the slider changes + if (timeselection[0]>0) { + document.getElementById("TimeNow").value = (timeselection[0] + tsView.dt()).toFixed(2); } else { document.getElementById("TimeNow").value = toSignificantDigits(timeData[currentTimeValue], 2); From 24af584cd2bfe916f8a5a087f1c4854ea786d8af Mon Sep 17 00:00:00 2001 From: kimonoki Date: Tue, 7 Aug 2018 16:35:10 +1000 Subject: [PATCH 39/53] TVB-2369 Fix wrong sphere buffers generated for the internal sensors --- tvb/interfaces/web/static/js/tvbviz.js | 7 ++++++- .../visualizers/commons/scripts/internalBrain.js | 13 ++++++++++++- .../visualizers/commons/scripts/virtualBrain.js | 15 +++++++++++---- 3 files changed, 29 insertions(+), 6 deletions(-) diff --git a/tvb/interfaces/web/static/js/tvbviz.js b/tvb/interfaces/web/static/js/tvbviz.js index 752c6983b..69b6d26ee 100644 --- a/tvb/interfaces/web/static/js/tvbviz.js +++ b/tvb/interfaces/web/static/js/tvbviz.js @@ -587,7 +587,12 @@ tv.plot = { f.energy_callback = function (data) { timeselection_energy = data; - changeSphereMeasurePoints_energy(); + if 
(isInternalSensorView) { + VSI_change_energySphericalMeasurePoints() + } + else { + changeSphereMeasurePoints_energy(); + } closeBlockerOverlay(); }; diff --git a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/internalBrain.js b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/internalBrain.js index 868179aa0..b25dd1a63 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/internalBrain.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/internalBrain.js @@ -26,6 +26,17 @@ function _VSI_bufferAtPoint(p, idx) { return [bufferVertices, bufferNormals, bufferTriangles, vertexRegionBuffer]; } +function VSI_change_energySphericalMeasurePoints() { + for (let i = 0; i < VS_selectedRegions.length; i++) { + const result = HLPR_sphereBufferAtPoint(gl, measurePoints[i], timeselection_energy[i][currentTimeValue], 12, 12); + const bufferVertices = result[0]; + const bufferNormals = result[1]; + const bufferTriangles = result[2]; + const vertexRegionBuffer = VSI_createColorBufferForSphere(i, bufferVertices.numItems * 3); + measurePointsBuffers[i] = [bufferVertices, bufferNormals, bufferTriangles, vertexRegionBuffer]; + } +} + /** * Method used for creating a color buffer for a cube (measure point). 
*/ @@ -75,5 +86,5 @@ function VSI_StartInternalActivityViewer(baseDatatypeURL, onePageSize, urlTimeLi _VSI_init_sphericalMeasurePoints(); //pause by default - AG_isStopped = true;; + AG_isStopped = true; } diff --git a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js index 512bac5dd..7e8d9da72 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js @@ -1004,7 +1004,7 @@ function drawBuffers(drawMode, buffersSets, bufferSetsMask, useBlending, cullFac gl.uniform1i(GL_shaderProgram.useVertexColors, false); gl.uniform1f(GL_shaderProgram.alphaUniform, 1); - // set sphere color green for the selected channels ones and yellow for the others + // set sphere color green for the selected channels and yellow for the others if (VS_selectedRegions.includes(i)) { gl.uniform4f(GL_shaderProgram.materialColor, 0.99, 0.99, 0.0, 1.0); drawBuffer(drawMode, buffersSets[i]); @@ -1112,7 +1112,12 @@ function tick() { //update energy if(timeselection_interval!=0 && !AG_isStopped){ - changeSphereMeasurePoints_energy(); + if(isInternalSensorView){ + VSI_change_energySphericalMeasurePoints() + } + else{ + changeSphereMeasurePoints_energy(); + } } drawScene(); @@ -1184,9 +1189,10 @@ function drawScene() { } if (isInternalSensorView) { - gl.uniform1f(GL_shaderProgram.alphaUniform, 1); + // for internal sensors we render only the sensors drawBuffers(gl.TRIANGLES, measurePointsBuffers); - } else { + } + else { //draw the nodes first to make it appear if (displayMeasureNodes) { isDrawingSpheres = true; @@ -1419,4 +1425,5 @@ function changeSphereMeasurePoints_energy() { measurePointsBuffers[i] = [bufferVertices, bufferNormals, bufferTriangles, bufferColor]; } } + /////////////////////////////////////// ~~~~~~~~~~ END ENERGY RELATED METHOD ~~~~~~~~~~~~~ 
////////////////////////////////// \ No newline at end of file From fe325eded35f717255fb51b25b27ea8c6895a2fa Mon Sep 17 00:00:00 2001 From: kimonoki Date: Fri, 10 Aug 2018 14:09:00 +1000 Subject: [PATCH 40/53] TVB-2372 Syncing the start time(t0) to the 3d slider Fix syncing when timestep is not one per frame --- tvb/interfaces/web/static/js/tvbviz.js | 13 +++---------- .../visualizers/commons/scripts/virtualBrain.js | 15 +++++++++++---- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/tvb/interfaces/web/static/js/tvbviz.js b/tvb/interfaces/web/static/js/tvbviz.js index 69b6d26ee..9721c2013 100644 --- a/tvb/interfaces/web/static/js/tvbviz.js +++ b/tvb/interfaces/web/static/js/tvbviz.js @@ -1318,7 +1318,7 @@ tv.plot = { //update the time in the input tag var time_index = parseInt((timeselection[0] - f.t0()) / f.dt()); triggered_by_changeinput = true; - $('#TimeNow').val(time_index) + $('#TimeNow').val(timeselection[0].toFixed(2)); $('#slider').slider('value', time_index); triggered_by_changeinput = false; loadFromTimeStep(time_index); @@ -1343,15 +1343,8 @@ tv.plot = { f.gp_ax_ctx_x.call(f.ax_ctx_x); d3.select(f.gp_br_ctx_x.node()).call(f.br_ctx_x.move, [0, timeselection[1] - timeselection[0]].map(f.sc_ctx_x)); } - if (timeStepsPerTick > 1) { - d3.select(f.gp_br_ctx_x.node()).call(f.br_ctx_x.move, [timeselection[0] + f.dt() * timeStepsPerTick, timeselection[1] + f.dt() * timeStepsPerTick].map(f.sc_ctx_x)); - } - else if (timeStepsPerTick < 1) { - d3.select(f.gp_br_ctx_x.node()).call(f.br_ctx_x.move, [timeselection[0] + f.dt() * 1 / (1 / timeStepsPerTick + 1), timeselection[1] + f.dt() * 1 / (1 / timeStepsPerTick + 1)].map(f.sc_ctx_x)); - } - else if (timeStepsPerTick === 1) { - d3.select(f.gp_br_ctx_x.node()).call(f.br_ctx_x.move, [timeselection[0] + f.dt(), timeselection[1] + f.dt()].map(f.sc_ctx_x)); - } + + d3.select(f.gp_br_ctx_x.node()).call(f.br_ctx_x.move, 
[f.t0()+currentTimeValue*f.dt(),f.t0()+currentTimeValue*f.dt()+timeselection_interval].map(f.sc_ctx_x)); } f.jump_to_next_time_range = function () { diff --git a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js index 7e8d9da72..448970816 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js @@ -579,6 +579,9 @@ function _initSliders() { sliderSel = true; currentTimeValue = target.value; $('#TimeNow').val(currentTimeValue); + if(timeselection_interval!=0){ + tsView.timeselection_move_fn(); + } }, change: function () { if (timeselection_interval!=0){ @@ -1133,12 +1136,16 @@ function tick() { lastTime = timeNow; if (timeData.length > 0 && !AG_isStopped) { - //syncing time with the d3 plot - //add dt because the 2d will add one step after the slider changes - if (timeselection[0]>0) { - document.getElementById("TimeNow").value = (timeselection[0] + tsView.dt()).toFixed(2); + if (timeselection[0] >= 0 && timeStepsPerTick < 1) { + document.getElementById("TimeNow").value = (timeselection[0]).toFixed(2); + } + else if (timeselection[0] > 0 && timeStepsPerTick >= 1) { + //syncing time with the d3 plot + //add dt because the 2d will add one step after the slider changes + document.getElementById("TimeNow").value = (timeselection[0]+ tsView.dt()*timeStepsPerTick).toFixed(2); } else { + //3d movie playing only document.getElementById("TimeNow").value = toSignificantDigits(timeData[currentTimeValue], 2); } } From 336adcab4b5842ecf3c36110b2f2a62f7a950cda Mon Sep 17 00:00:00 2001 From: kimonoki Date: Sun, 12 Aug 2018 15:12:47 +1000 Subject: [PATCH 41/53] TVB-2379 Remove unused FLOT functions --- tvb/interfaces/web/static/js/tvbviz.js | 5 +- .../commons/scripts/virtualBrain.js | 23 +-- .../new_dual_brain/scripts/dualBrainViewer.js | 178 
+----------------- 3 files changed, 14 insertions(+), 192 deletions(-) diff --git a/tvb/interfaces/web/static/js/tvbviz.js b/tvb/interfaces/web/static/js/tvbviz.js index 9721c2013..d7b59668f 100644 --- a/tvb/interfaces/web/static/js/tvbviz.js +++ b/tvb/interfaces/web/static/js/tvbviz.js @@ -33,7 +33,7 @@ /* global tv, d3 */ -//added globals for time selection +//add globals for time selection in the new dual brain viewer for time series data var timeselection_interval_length = 0;//integer var timeselection_interval = 0; var timeselection = []; @@ -41,8 +41,6 @@ var timeselection = []; // identify the initiator of the change of the time selection: brushing or movie timeline var triggered_by_timeselection = true; var triggered_by_changeinput=false; -//store the unmapped selection value used to animate the time selection window -var selection_x = []; //store the energy calculated from the time selection var timeselection_energy = []; @@ -1190,7 +1188,6 @@ tv.plot = { if (d3.event.selection != null) { event_selection_x[0] = d3.event.selection[0]; event_selection_x[1] = d3.event.selection[1]; - selection_x = event_selection_x; } event_selection_x = event_selection_x.map(f.sc_ctx_x.invert); timeselection = event_selection_x; diff --git a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js index 448970816..3cd29da08 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js @@ -579,12 +579,12 @@ function _initSliders() { sliderSel = true; currentTimeValue = target.value; $('#TimeNow').val(currentTimeValue); - if(timeselection_interval!=0){ + if(isDoubleView && timeselection_interval!=0){ tsView.timeselection_move_fn(); } }, change: function () { - if (timeselection_interval!=0){ + if (isDoubleView && timeselection_interval!=0){ 
triggered_by_timeselection = false; tsView.timeselection_move_fn(); triggered_by_timeselection = true; @@ -1090,10 +1090,6 @@ function tick() { if (currentTimeValue > MAX_TIME) { // Next time value is no longer in activity data. initActivityData(); - if (isDoubleView) { - loadEEGChartFromTimeStep(0); - drawGraph(false, 0); - } shouldStep = false; } @@ -1104,9 +1100,6 @@ function tick() { if (shouldChangeCurrentActivitiesFile()) { changeCurrentActivitiesFile(); } - if (isDoubleView) { - drawGraph(true, TIME_STEP); - } } } @@ -1114,7 +1107,7 @@ function tick() { updateColors(currentTimeInFrame); //update energy - if(timeselection_interval!=0 && !AG_isStopped){ + if(isDoubleView && timeselection_interval!=0 && !AG_isStopped){ if(isInternalSensorView){ VSI_change_energySphericalMeasurePoints() } @@ -1136,13 +1129,13 @@ function tick() { lastTime = timeNow; if (timeData.length > 0 && !AG_isStopped) { - if (timeselection[0] >= 0 && timeStepsPerTick < 1) { + if (isDoubleView && timeselection[0] >= 0 && timeStepsPerTick < 1) { document.getElementById("TimeNow").value = (timeselection[0]).toFixed(2); } - else if (timeselection[0] > 0 && timeStepsPerTick >= 1) { + else if (isDoubleView && timeselection[0] > 0 && timeStepsPerTick >= 1) { //syncing time with the d3 plot //add dt because the 2d will add one step after the slider changes - document.getElementById("TimeNow").value = (timeselection[0]+ tsView.dt()*timeStepsPerTick).toFixed(2); + document.getElementById("TimeNow").value = (timeselection[0] + tsView.dt() * timeStepsPerTick).toFixed(2); } else { //3d movie playing only @@ -1312,10 +1305,6 @@ function loadFromTimeStep(step) { nextActivitiesFileData = null; currentActivitiesFileLength = activitiesData.length * TIME_STEP; totalPassedActivitiesData = currentTimeValue; - // Also sync eeg monitor if in double view - if (isDoubleView) { - loadEEGChartFromTimeStep(step); - } closeBlockerOverlay(); } diff --git 
a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js index d60365682..43165468e 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js @@ -178,7 +178,6 @@ function AG_startAnimatedChart(ag_settings) { _AG_initGlobals(ag_settings); _AG_initPaginationState(ag_settings.number_of_visible_points); _AG_preStart(); - drawSliderForScale(); drawSliderForAnimationSpeed(); _AG_init_selection(ag_settings.measurePointsSelectionGIDs); @@ -359,12 +358,6 @@ function _AG_changeStateVariable(tsIndex, val) { refreshChannels(); } - -//this function is used in virtualBrain.js keep it for now -function drawGraph() { - -} - function _AG_getSelectedDataAndLongestChannelIndex(data) { let offset = 0; let selectedData = []; @@ -472,7 +465,6 @@ function submitSelectedChannels(isEndOfData) { } - function resizeToFillParent(ts) { var container, width, height; @@ -505,31 +497,6 @@ function _updateScalingFromSlider(value) { } -/** - * This method decides if we are at the beginning or end of the graph, in which case we only need - * to move the vertical line, or in between, where vertical line is not moving, instead arrays are shifted. 
- */ -function shouldMoveLine(direction, shiftNo) { - shiftNo = shiftNo || 1; - let isEndOfGraph = false; - let isStartOfGraph = false; - if (direction === 1) { - isEndOfGraph = ((totalPassedData + AG_currentIndex + noOfShiftedPoints >= totalTimeLength) && (currentLinePosition < AG_numberOfVisiblePoints + shiftNo)); - isStartOfGraph = (currentLinePosition < targetVerticalLinePosition); - if (AG_displayedTimes[currentLinePosition] > AG_displayedPoints[longestChannelIndex][AG_displayedPoints[longestChannelIndex].length - 1][0]) { - isEndOfGraph = false; - } - } else { - isEndOfGraph = (currentLinePosition > targetVerticalLinePosition); - isStartOfGraph = ((totalPassedData + AG_currentIndex - noOfShiftedPoints < AG_numberOfVisiblePoints) && (currentLinePosition > 0)); - if (AG_displayedTimes[currentLinePosition] <= 0) { - isStartOfGraph = false; - } - } - - return isStartOfGraph || isEndOfGraph; -} - var isEndOfData = false; var AG_channelColorsDict = {}; var AG_reversedChannelColorsDict = {}; @@ -548,7 +515,6 @@ function generateChannelColors(nr_of_channels) { } } - /** * Translate the given value. * We use this method to translate the values for the drawn line charts because we don't want them to overlap. @@ -561,85 +527,6 @@ function AG_addTranslationStep(value, index) { return value * AG_scaling - AG_normalizationSteps[displayedChannels[index]] + AG_translationStep * AG_computedStep * index; } -function getTimeoutBasedOnSpeed() { - const currentAnimationSpeedValue = _AG_get_speed(40); - if (currentAnimationSpeedValue === 0) { - return 300; - } - const timeout = 10 - Math.abs(currentAnimationSpeedValue); - if (timeout === 9) { - return 3000; - } - if (timeout === 8) { - return 2000; - } - if (timeout === 7) { - return 1000; - } - return timeout * 100 + 25; -} - -/* - * Load the data from a given step and center plot around that step. 
- */ -function loadEEGChartFromTimeStep(step) { - // Read all data for the page in which the selected step falls into - const chunkForStep = Math.floor(step / dataPageSize); - const dataUrl = readDataPageURL(baseDataURLS[0], chunkForStep * dataPageSize, (chunkForStep + 1) * dataPageSize, tsStates[0], tsModes[0]); - const dataPage = [parseData(HLPR_readJSONfromFile(dataUrl), 0)]; - AG_allPoints = getDisplayedChannels(dataPage[0], 0).slice(0); - AG_time = HLPR_readJSONfromFile(timeSetUrls[0][chunkForStep]).slice(0); - totalPassedData = chunkForStep * dataPageSize; // New passed data will be all data until the start of this page - currentDataFileIndex = chunkForStep; - AG_displayedPoints = []; - const indexInPage = step % dataPageSize; // This is the index in the current page that step will have - let fromIdx, toIdx; - currentLinePosition = AG_numberOfVisiblePoints / 2; // Assume we are not end or beginning since that will be most of the times - if (indexInPage <= AG_numberOfVisiblePoints / 2) { - if (chunkForStep === 0) { - // We are at the beginning of the graph, line did not reach middle point yet, and we are still displaying the first - // AG_numberOfVisiblePoints values - AG_currentIndex = AG_numberOfVisiblePoints; - currentLinePosition = indexInPage; - prepareDisplayData(0, AG_numberOfVisiblePoints, AG_allPoints, AG_time); - } else { - // We are at an edge case between pages. So in order to have all the - // AG_numberOfVisiblePoints we need to also load the points from before this page - addFromPreviousPage(indexInPage, chunkForStep); - } - } else { - if ((indexInPage >= pageSize - AG_numberOfVisiblePoints / 2) || (nrOfPagesSet[0] === 1 && indexInPage + AG_numberOfVisiblePoints / 2 > AG_time.length)) { - if (chunkForStep >= nrOfPagesSet[0] - 1) { - // We are at the end of the graph. The line is starting to move further right from the middle position. 
We are just - // displaying the last AG_numberOfVisiblePoints from the last page - if (AG_time.length > AG_numberOfVisiblePoints) { - fromIdx = AG_time.length - 1 - AG_numberOfVisiblePoints; - } else { - fromIdx = 0; - } - toIdx = AG_time.length - 1; - AG_currentIndex = toIdx; - currentLinePosition = AG_numberOfVisiblePoints - (AG_time.length - 1 - indexInPage); - prepareDisplayData(fromIdx, toIdx, AG_allPoints, AG_time); - } else { - // We are at an edge case between pages. So in order to have all the - // AG_numberOfVisiblePoints we need to also load the points from after this page - addFromNextPage(indexInPage, chunkForStep); - } - } else { - // We are somewhere in the middle of the graph. - fromIdx = indexInPage - AG_numberOfVisiblePoints / 2; - toIdx = indexInPage + AG_numberOfVisiblePoints / 2; - AG_currentIndex = toIdx; - prepareDisplayData(fromIdx, toIdx, AG_allPoints, AG_time); - } - } - nextData = []; - AG_isLoadStarted = false; - isNextDataLoaded = false; - isNextTimeDataLoaded = false; -} - /* * Add all required data to AG_displayedPoints and AG_displayedTimes in order to center * around indexInPage, if some of the required data is on the previous page. @@ -941,56 +828,6 @@ function getDisplayedChannels(listOfAllChannels, offset) { return selectedData; } - -//------------------------------------------------START ZOOM RELATED CODE-------------------------------------------------------- -function stopAnimation() { - AG_isStopped = !AG_isStopped; - var btn = $("#ctrl-action-pause"); - if (AG_isStopped) { - btn.html("Start"); - btn.attr("class", "action action-controller-launch"); - } else { - btn.html("Pause"); - btn.attr("class", "action action-controller-pause"); - } - -} - - -//------------------------------------------------START SCALE RELATED CODE-------------------------------------------------------- - - -function drawSliderForScale() { - function _onchange() { - /** When scaling, we need to redraw the graph and update the HTML with the new values. 
- */ - var spacing = $("#ctrl-input-spacing").slider("value") / 4; - var scale = $("#ctrl-input-scale").slider("value"); - - if (spacing >= 0 && AG_currentIndex <= AG_numberOfVisiblePoints) { - AG_currentIndex = AG_numberOfVisiblePoints; - } else if (spacing < 0 && (AG_allPoints[0].length - AG_currentIndex) < AG_numberOfVisiblePoints) { - AG_currentIndex = AG_allPoints[0].length; - } - AG_displayedPoints = []; - for (var i = 0; i < AG_noOfLines; i++) { - AG_displayedPoints.push([]); - } - _updateScaleFactor(scale); - } - - $("#ctrl-input-scale").slider({value: 1, min: 1, max: 32, change: _onchange}); - - $("#display-scale").html("" + AG_scaling); -} - -function _updateScaleFactor(scale) { - AG_scaling = scale; - $("#display-scale").html("" + AG_scaling); -} - -//------------------------------------------------END SCALE RELATED CODE-------------------------------------------------------- - //------------------------------------------------START SPEED RELATED CODE-------------------------------------------------------- function drawSliderForAnimationSpeed() { @@ -1005,7 +842,6 @@ function drawSliderForAnimationSpeed() { }); } - function updateSpeedFactor() { var speed = $("#ctrl-input-speed").slider("option", "value"); $('#display-speed').html('' + speed); @@ -1015,10 +851,10 @@ function updateSpeedFactor() { //------------------------------------------------END SPEED RELATED CODE-------------------------------------------------------- //------------------------------------------------START TIME SERIES TIME SELECTION RELATED CODE-------------------------------------------------------- - function intervalSet(){ - var start=$('#SetIntervalStart').val(); - var end=$('#SetIntervalEnd').val(); - if(start=0 && start < end) { + tsView.timeselection_interval_set(start, end); + } +} \ No newline at end of file From a0625f7e22cb6920161758db900145cd571093fd Mon Sep 17 00:00:00 2001 From: kimonoki Date: Mon, 13 Aug 2018 11:11:12 +1000 Subject: [PATCH 42/53] TVB-2378 Remove 
unused scaling slider --- .../visualizers/new_dual_brain/controls.html | 41 +------------------ 1 file changed, 1 insertion(+), 40 deletions(-) diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html index c06dc24d7..cf39dae03 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html @@ -1,7 +1,4 @@
    - - -
    @@ -36,7 +33,7 @@ - + @@ -51,40 +48,4 @@
    - - - - - -
    - -
    - -
    - - 3 -
    -
    -
    -
    -
    -
    -
    - - -
    - - 1 -
    -
    -
    -
    -
    -
    - - -
    -
    - -
    \ No newline at end of file From 0b656dfd5f8d5168585a3d135c798593a33e84ed Mon Sep 17 00:00:00 2001 From: kimonoki Date: Wed, 15 Aug 2018 09:35:05 +1000 Subject: [PATCH 43/53] Add work in progress text in the new viewer's title and restore the old viewer --- tvb/adapters/visualizers/new_dual_viewer.py | 2 +- tvb/interfaces/web/structure.py | 2 +- .../genshi/visualizers/commons/scripts/virtualBrain.js | 4 ++++ 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/tvb/adapters/visualizers/new_dual_viewer.py b/tvb/adapters/visualizers/new_dual_viewer.py index 4f5ea9595..9de0ac1c3 100644 --- a/tvb/adapters/visualizers/new_dual_viewer.py +++ b/tvb/adapters/visualizers/new_dual_viewer.py @@ -45,7 +45,7 @@ class NewDualViewer(BrainViewer): New visualizer merging Brain 3D display and EEG lines display. Same input as the DualBrainViewer """ - _ui_name = "New Viewer for Time Series in 3D and 2D" + _ui_name = "(Work in progress)New Viewer for Time Series in 3D and 2D" _ui_subsection = "new_brain_dual" def get_input_tree(self): diff --git a/tvb/interfaces/web/structure.py b/tvb/interfaces/web/structure.py index efd6241bf..17da7d548 100644 --- a/tvb/interfaces/web/structure.py +++ b/tvb/interfaces/web/structure.py @@ -212,7 +212,7 @@ class WebStructure(object): SUB_SECTION_VIEW_23: "Wavelet Visualizer", SUB_SECTION_VIEW_24: "Annotations Visualizer", SUB_SECTION_VIEW_25: "Matrix Visualizer", - SUB_SECTION_VIEW_26: "New Brain Dual Activity Visualizer (3D and 2D)" + SUB_SECTION_VIEW_26: "(work in progress)New Brain Dual Activity Visualizer (3D and 2D)" } diff --git a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js index 3cd29da08..4238d21e5 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js @@ -163,7 +163,11 @@ var 
VB_BrainNavigator; //indicating we are drawing the energy spheres and applying material colors + +//time selection vars that appear in the new dual view var isDrawingSpheres = false; +var timeselection_interval=0; +var timeselection = []; /** * Change transparency of cortical surface from user-input. * From b4f664efe8e2f875ecc3dbef4f0eff14cfadce17 Mon Sep 17 00:00:00 2001 From: kimonoki Date: Wed, 15 Aug 2018 12:08:02 +1000 Subject: [PATCH 44/53] Change default opacity value to 0.3 Add default time selection --- tvb/interfaces/web/static/js/tvbviz.js | 11 +++++++++++ .../visualizers/commons/scripts/virtualBrain.js | 2 +- .../genshi/visualizers/new_dual_brain/controls.html | 2 +- 3 files changed, 13 insertions(+), 2 deletions(-) diff --git a/tvb/interfaces/web/static/js/tvbviz.js b/tvb/interfaces/web/static/js/tvbviz.js index d7b59668f..f5b14888f 100644 --- a/tvb/interfaces/web/static/js/tvbviz.js +++ b/tvb/interfaces/web/static/js/tvbviz.js @@ -1291,9 +1291,20 @@ tv.plot = { //add main focus brush group f.gp_br_fcs = f.gp_fcs.append("g").classed("brush", true).call(f.br_fcs); + //default selection + f.timeselection_default(); }; //functions for the time selection window + f.timeselection_default = function () { + if (f.t0() + f.dt() * (f.shape()[0] - 1) > 100) { + d3.select(f.gp_br_ctx_x.node()).call(f.br_ctx_x.move, [f.t0(), 100].map(f.sc_ctx_x)); + } + else { + d3.select(f.gp_br_ctx_x.node()).call(f.br_ctx_x.move, [f.t0(),f.t0() + f.dt() * (f.shape()[0] - 1)].map(f.sc_ctx_x)); + } + } + f.timeselection_update_fn = function () { //display the selected time range d3.select("#SetIntervalStart").property('value', timeselection[0].toFixed(2)); diff --git a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js index 4238d21e5..c876cb1dd 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js +++ 
b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js @@ -427,7 +427,7 @@ function VS_StartBrainActivityViewer(baseDatatypeURL, onePageSize, urlTimeList, withTransparency = transparencyStatus; //pause by default AG_isStopped = true; - _alphaValue=0.1; + _alphaValue=0.3; displayMeasureNodes=true; } diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html index cf39dae03..06b3ee62f 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html @@ -25,7 +25,7 @@
    From a0cf86ba28e774d87012411594d7edd141896dd8 Mon Sep 17 00:00:00 2001 From: kimonoki Date: Wed, 15 Aug 2018 16:24:15 +1000 Subject: [PATCH 45/53] Avoid redundant calling of the energy retrieving function when the time selection window size is not changed --- tvb/interfaces/web/static/js/tvbviz.js | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/tvb/interfaces/web/static/js/tvbviz.js b/tvb/interfaces/web/static/js/tvbviz.js index f5b14888f..3146186a7 100644 --- a/tvb/interfaces/web/static/js/tvbviz.js +++ b/tvb/interfaces/web/static/js/tvbviz.js @@ -1312,6 +1312,7 @@ tv.plot = { $("#info-interval").html((timeselection[1] - timeselection[0]).toFixed(2) + "ms"); if (triggered_by_timeselection) { + var timeselection_lasttime=timeselection_interval_length; timeselection_interval = timeselection[1] - timeselection[0]; timeselection_interval_length = parseInt(timeselection_interval / f.dt()) - 1; //retrieve energy for the whole timeline rather than a slice @@ -1321,8 +1322,13 @@ tv.plot = { all_slice[0].lo = 0; //call the energy computation method and block until get the enery data - showBlockerOverlay(50000); - tv.util.get_time_selection_energy(f.baseURL(), all_slice, f.energy_callback, f.channels(), f.mode(), f.state_var(), timeselection_interval_length); + if(timeselection_lasttime!=timeselection_interval_length){ + showBlockerOverlay(50000); + tv.util.get_time_selection_energy(f.baseURL(), all_slice, f.energy_callback, f.channels(), f.mode(), f.state_var(), timeselection_interval_length); + } + else if(timeselection_lasttime===timeselection_interval_length&×election_interval_length!=0){ + changeSphereMeasurePoints_energy(); + } //update the time in the input tag var time_index = parseInt((timeselection[0] - f.t0()) / f.dt()); triggered_by_changeinput = true; From cac912b6410edaf284b4753491b989bafb7bcc85 Mon Sep 17 00:00:00 2001 From: kimonoki Date: Fri, 17 Aug 2018 16:03:42 +1000 Subject: [PATCH 46/53] Display the whole timeline for 
the 2d plot --- .../visualizers/new_dual_brain/scripts/dualBrainViewer.js | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js index 43165468e..f0e4b97bb 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js @@ -432,12 +432,11 @@ function submitSelectedChannels(isEndOfData) { //The shape we use for time series now only uses 1D - var dataShape = [AG_time.length, 1, AG_submitableSelectedChannels.length, 1]; + var dataShape = [totalTimeLength, 1, AG_submitableSelectedChannels.length, 1]; var selectedLabels = [] for (let i = 0; i < AG_submitableSelectedChannels.length; i++) { selectedLabels.push([chanDisplayLabels[displayedChannels[i]]]); } - //use d3 to create 2D plot ts = tv.plot.time_series(); ts.baseURL(baseDataURLS[0]).preview(false).mode(0).state_var(0); From 847216b2226aef108b87161fdce4331f765aaa2f Mon Sep 17 00:00:00 2001 From: kimonoki Date: Mon, 20 Aug 2018 18:12:18 +1000 Subject: [PATCH 47/53] Fix calling 2d drawing from 3d movie playing for the old dual brain activity viewer --- .../visualizers/commons/scripts/virtualBrain.js | 13 +++++++++++++ .../new_dual_brain/scripts/dualBrainViewer.js | 3 +++ 2 files changed, 16 insertions(+) diff --git a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js index c876cb1dd..940a9a1ff 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js @@ -168,6 +168,8 @@ var VB_BrainNavigator; var isDrawingSpheres = false; var timeselection_interval=0; var 
timeselection = []; + +var isnewDoubleView=false; /** * Change transparency of cortical surface from user-input. * @@ -1094,6 +1096,10 @@ function tick() { if (currentTimeValue > MAX_TIME) { // Next time value is no longer in activity data. initActivityData(); + if (!isnewDoubleView&&isDoubleView) { + loadEEGChartFromTimeStep(0); + drawGraph(false, 0); + } shouldStep = false; } @@ -1104,6 +1110,9 @@ function tick() { if (shouldChangeCurrentActivitiesFile()) { changeCurrentActivitiesFile(); } + if (!isnewDoubleView&&isDoubleView) { + drawGraph(true, TIME_STEP); + } } } @@ -1309,6 +1318,10 @@ function loadFromTimeStep(step) { nextActivitiesFileData = null; currentActivitiesFileLength = activitiesData.length * TIME_STEP; totalPassedActivitiesData = currentTimeValue; + // Also sync eeg monitor if in double view + if (!isnewDoubleView&&isDoubleView) { + loadEEGChartFromTimeStep(step); + } closeBlockerOverlay(); } diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js index f0e4b97bb..da606bd1b 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js @@ -431,6 +431,7 @@ function submitSelectedChannels(isEndOfData) { } + //create 2D plot //The shape we use for time series now only uses 1D var dataShape = [totalTimeLength, 1, AG_submitableSelectedChannels.length, 1]; var selectedLabels = [] @@ -451,6 +452,8 @@ function submitSelectedChannels(isEndOfData) { ts(d3.select("#time-series-viewer")); tsView = ts; + isnewDoubleView=true; + // This is arbitrarily set to a value. 
To be consistent with tsview we rescale relative to this value _initial_magic_fcs_amp_scl = tsView.magic_fcs_amp_scl; From 9938e320a163b0652ec5eeabf087e8f8756dc4d5 Mon Sep 17 00:00:00 2001 From: kimonoki Date: Tue, 21 Aug 2018 09:15:02 +1000 Subject: [PATCH 48/53] Fix sphere size changing from dragging time selection window in 2d plot --- tvb/interfaces/web/static/js/tvbviz.js | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/tvb/interfaces/web/static/js/tvbviz.js b/tvb/interfaces/web/static/js/tvbviz.js index 3146186a7..1fe12e8f1 100644 --- a/tvb/interfaces/web/static/js/tvbviz.js +++ b/tvb/interfaces/web/static/js/tvbviz.js @@ -1326,8 +1326,13 @@ tv.plot = { showBlockerOverlay(50000); tv.util.get_time_selection_energy(f.baseURL(), all_slice, f.energy_callback, f.channels(), f.mode(), f.state_var(), timeselection_interval_length); } - else if(timeselection_lasttime===timeselection_interval_length&×election_interval_length!=0){ - changeSphereMeasurePoints_energy(); + else if (timeselection_lasttime === timeselection_interval_length && timeselection_interval_length != 0) { + if (isInternalSensorView) { + VSI_change_energySphericalMeasurePoints() + } + else { + changeSphereMeasurePoints_energy(); + } } //update the time in the input tag var time_index = parseInt((timeselection[0] - f.t0()) / f.dt()); From 65d81b8458abcbc164fb294b564fa22e8ba0279c Mon Sep 17 00:00:00 2001 From: kimonoki Date: Fri, 24 Aug 2018 13:32:06 +1000 Subject: [PATCH 49/53] TVB-2359 Restore 2D plot scaling feature --- tvb/interfaces/web/static/js/tvbviz.js | 36 ++++++++++--------- .../visualizers/new_dual_brain/controls.html | 31 ++++++++++++++++ .../new_dual_brain/scripts/dualBrainViewer.js | 1 - .../time_series/scripts/timeseriesSVG.js | 1 + 4 files changed, 51 insertions(+), 18 deletions(-) diff --git a/tvb/interfaces/web/static/js/tvbviz.js b/tvb/interfaces/web/static/js/tvbviz.js index 1fe12e8f1..8b5e0f3cf 100644 --- a/tvb/interfaces/web/static/js/tvbviz.js +++ 
b/tvb/interfaces/web/static/js/tvbviz.js @@ -665,7 +665,7 @@ tv.plot = { f.sz_ctx_y = {x: f.pad.x * 0.8, y: f.h() - 3 * f.pad.y - f.pad.y}; if(f.viewer_type()==='svg'){ f.ul_ctx_x = {x: 2 * f.pad.x + f.sz_ctx_y.x, y: 2 * f.pad.y + f.sz_ctx_y.y}; - f.sz_ctx_x = {x: f.w() - 3 * f.pad.x- f.sz_ctx_y.x, y: f.pad.y / 2}; + f.sz_ctx_x = {x: f.w() - 3 * f.pad.x- f.sz_ctx_y.x, y: f.pad.y}; } else{ f.ul_ctx_x = {x: f.pad.x, y: 2 * f.pad.y + f.sz_ctx_y.y}; @@ -918,7 +918,6 @@ tv.plot = { }; f.render_focus = function () { - var ts = f.ts() , g = f.gp_lines.selectAll("g").data(f.da_lines, function (d) { return d.id; @@ -942,7 +941,8 @@ tv.plot = { return f.sc_ctx_x(ts.data[i]); }) .y(function (d) { - return d; + //use the scale variable directly + return d*f.magic_fcs_amp_scl; }) (d.sig); }); @@ -953,8 +953,6 @@ tv.plot = { f.render_contexts = function () { // draw context lines and average if(f.viewer_type()==='svg'){ - var ts = f.ts(); - // horizontal context line var f1 = f.gp_ctx_x.append("g").attr("style", "clip-path: url(#fig-ctx-x-clip)"); var f2 = f1.selectAll("g").data([f.da_x]).enter(); @@ -996,7 +994,7 @@ tv.plot = { })); - // vertical context lines + // vertical context lines f.gp_ctx_y.append("g").selectAll("g").data(f.da_y) .enter() .append("g").attr("transform", function (d, i) { @@ -1070,13 +1068,13 @@ tv.plot = { x_scaling = scale_brushed.domain()[1] / (dom[1] - dom[0]); sc.domain(dom); f.gp_ax_fcs_x.call(f.ax_fcs_x); - + //keep the x context in the same range for the svg viewer if(f.viewer_type()!='svg'){ f.sc_ctx_x.domain(dom); f.gp_ax_ctx_x.call(f.ax_ctx_x); } - + // TODO: This seems to cause problems with negative values and commenting it out does not seem to // cause any additional problems. This could do with some double checking. 
@@ -1196,15 +1194,16 @@ tv.plot = { f.gp_ctx_x.selectAll(".selected-time").remove(); //change the actual time point in the slider - if (d3.event.selection != null) { - f.timeselection_update_fn() - } + if (f.viewer_type() === 'dualbrain' && d3.event.selection != null) { + f.timeselection_update_fn() + } } }; // on end of focus brush // this is on f so that f can call it when everything else is done.. + //this function can reset the status of the plot to the initial state f.br_fcs_endfn = function (no_render) { if (!d3.event || !d3.event.sourceEvent) { br_ctx_y_fn(); @@ -1216,8 +1215,6 @@ tv.plot = { f.gp_br_fcs.node().__brush.selection = null; f.gp_br_fcs.call(f.br_fcs); f.scale_focus_stroke(); - - }; @@ -1290,9 +1287,12 @@ tv.plot = { f.gp_br_ctx_x.classed("brush", true).attr("class", "time-selection-brush").call(f.br_ctx_x).selectAll("rect").attr("height", f.sz_ctx_x.y); //add main focus brush group - f.gp_br_fcs = f.gp_fcs.append("g").classed("brush", true).call(f.br_fcs); + f.gp_br_fcs = f.gp_fcs.append("g").classed("brush", true).call(f.br_fcs).call(f.br_fcs_endfn); + //default selection - f.timeselection_default(); + if(f.viewer_type()==='dualbrain'){ + f.timeselection_default(); + } }; //functions for the time selection window @@ -1326,7 +1326,7 @@ tv.plot = { showBlockerOverlay(50000); tv.util.get_time_selection_energy(f.baseURL(), all_slice, f.energy_callback, f.channels(), f.mode(), f.state_var(), timeselection_interval_length); } - else if (timeselection_lasttime === timeselection_interval_length && timeselection_interval_length != 0) { + else if(timeselection_lasttime===timeselection_interval_length&×election_interval_length!=0){ if (isInternalSensorView) { VSI_change_energySphericalMeasurePoints() } @@ -1340,7 +1340,9 @@ tv.plot = { $('#TimeNow').val(timeselection[0].toFixed(2)); $('#slider').slider('value', time_index); triggered_by_changeinput = false; - loadFromTimeStep(time_index); + if(f.viewer_type()==='dualbrain') { + 
loadFromTimeStep(time_index); + } } }; diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html index 06b3ee62f..93ec2e842 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/controls.html @@ -48,4 +48,35 @@ + + +
    + +
    + +
    + + 3 +
    +
    +
    +
    +
    +
    +
    + + +
    + + 1 +
    +
    +
    +
    +
    +
    + + +
    +
    \ No newline at end of file diff --git a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js index da606bd1b..760f86027 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/new_dual_brain/scripts/dualBrainViewer.js @@ -487,7 +487,6 @@ function _updateScalingFromSlider(value) { var expo_scale = (value - 50) / 50; // [1 .. -1] var scale = Math.pow(10, expo_scale * 4); // [1000..-1000] tsView.magic_fcs_amp_scl = _initial_magic_fcs_amp_scl * scale; - tsView.prepare_data(); tsView.render_focus(); if (scale >= 1) { diff --git a/tvb/interfaces/web/templates/genshi/visualizers/time_series/scripts/timeseriesSVG.js b/tvb/interfaces/web/templates/genshi/visualizers/time_series/scripts/timeseriesSVG.js index 9316e70fd..08ad4f27f 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/time_series/scripts/timeseriesSVG.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/time_series/scripts/timeseriesSVG.js @@ -189,6 +189,7 @@ function refreshChannels() { new_ts.baseURL(tsView.baseURL()).preview(tsView.preview()).mode(tsView.mode()).state_var(tsView.state_var()); new_ts.shape(shape).t0(tsView.t0()).dt(tsView.dt()); new_ts.labels(selectedLabels); + new_ts.viewer_type('svg'); // Usually the svg component shows the channels stored in TS_SVG_selectedChannels // and that variable is in sync with the selection component. 
// But if the selection is empty and we show a timeSeriesSurface From 6484a31bfa0be44ed94ba2ec6a62864b27f6d036 Mon Sep 17 00:00:00 2001 From: kimonoki Date: Tue, 28 Aug 2018 17:27:55 +1000 Subject: [PATCH 50/53] TVB-2359 Fix messed sphere buffers when changed with time (wrong region and sphere index) The sphere color is controlled in draw buffer function thus we cannot use selectedRegions in the changeSphereMeasurePoints_energy function --- .../genshi/visualizers/commons/scripts/virtualBrain.js | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js index 940a9a1ff..63a5dc1fe 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js @@ -1428,9 +1428,8 @@ function readFileData(fileUrl, async, callIdentifier) { /////////////////////////////////////// ~~~~~~~~~~ START ENERGY RELATED METHOD ~~~~~~~~~~~~~ ////////////////////////////////// //init spheres with energy controlling the radius function changeSphereMeasurePoints_energy() { - for (let i = 0; i < VS_selectedRegions.length; i++) { - // generate spheres - const result = HLPR_sphereBufferAtPoint(gl, measurePoints[VS_selectedRegions[i]],timeselection_energy[i][currentTimeValue]); + for (let i = 0; i < NO_OF_MEASURE_POINTS; i++) { + const result = HLPR_sphereBufferAtPoint(gl, measurePoints[i], timeselection_energy[i][currentTimeValue]);//3 for the default radius value now, we will modify it later const bufferVertices = result[0]; const bufferNormals = result[1]; const bufferTriangles = result[2]; From b6e1958423615f626367e23b078764adc4d25034 Mon Sep 17 00:00:00 2001 From: kimonoki Date: Fri, 31 Aug 2018 19:45:53 +1000 Subject: [PATCH 51/53] TVB-2359 Fix sphere rendering for non-selected channels Fix not requesting energy 
data while updating channel selection --- tvb/interfaces/web/static/js/tvbviz.js | 24 ++++++++++++---- .../commons/scripts/internalBrain.js | 26 ++++++++++++----- .../commons/scripts/virtualBrain.js | 28 +++++++++++++------ 3 files changed, 57 insertions(+), 21 deletions(-) diff --git a/tvb/interfaces/web/static/js/tvbviz.js b/tvb/interfaces/web/static/js/tvbviz.js index 8b5e0f3cf..89e65aec6 100644 --- a/tvb/interfaces/web/static/js/tvbviz.js +++ b/tvb/interfaces/web/static/js/tvbviz.js @@ -583,6 +583,17 @@ tv.plot = { f.render(); }; // end function f() + f.update_energy=function(){ + var all_slice = f.current_slice(); + all_slice[0].di = f.shape()[1]; + all_slice[0].hi = f.shape()[0]; + all_slice[0].lo = 0; + tv.util.get_time_selection_energy(f.baseURL(), all_slice, f.energy_callback, f.channels(), f.mode(), f.state_var(), timeselection_interval_length); + + }; + + f.channel_lasttime=null; + f.energy_callback = function (data) { timeselection_energy = data; if (isInternalSensorView) { @@ -630,7 +641,6 @@ tv.plot = { f.prepare_data(); f.status_line.text("rendering data..."); f.render_focus(); - if (!f.we_are_setup) { f.render_contexts(); f.add_brushes(); @@ -1219,8 +1229,6 @@ tv.plot = { f.br_fcs_startfn = function () { - // we will use the left upper of the brush to do a tooltip - //select a channel var event_selection_y = []; event_selection_y[1] = d3.event.selection[0][1]; @@ -1313,18 +1321,20 @@ tv.plot = { if (triggered_by_timeselection) { var timeselection_lasttime=timeselection_interval_length; + timeselection_interval = timeselection[1] - timeselection[0]; timeselection_interval_length = parseInt(timeselection_interval / f.dt()) - 1; + //retrieve energy for the whole timeline rather than a slice var all_slice = f.current_slice(); all_slice[0].di = f.shape()[1]; all_slice[0].hi = f.shape()[0]; all_slice[0].lo = 0; - //call the energy computation method and block until get the enery data - if(timeselection_lasttime!=timeselection_interval_length){ + //call 
the energy computation method and block until get the enery data if channel or time range is changed + if(timeselection_lasttime!=timeselection_interval_length||f.channel_lasttime!==f.channels()){ showBlockerOverlay(50000); - tv.util.get_time_selection_energy(f.baseURL(), all_slice, f.energy_callback, f.channels(), f.mode(), f.state_var(), timeselection_interval_length); + f.update_energy(); } else if(timeselection_lasttime===timeselection_interval_length&×election_interval_length!=0){ if (isInternalSensorView) { @@ -1334,6 +1344,8 @@ tv.plot = { changeSphereMeasurePoints_energy(); } } + f.channel_lasttime=f.channels(); + //update the time in the input tag var time_index = parseInt((timeselection[0] - f.t0()) / f.dt()); triggered_by_changeinput = true; diff --git a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/internalBrain.js b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/internalBrain.js index b25dd1a63..733c5240b 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/internalBrain.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/internalBrain.js @@ -27,13 +27,25 @@ function _VSI_bufferAtPoint(p, idx) { } function VSI_change_energySphericalMeasurePoints() { - for (let i = 0; i < VS_selectedRegions.length; i++) { - const result = HLPR_sphereBufferAtPoint(gl, measurePoints[i], timeselection_energy[i][currentTimeValue], 12, 12); - const bufferVertices = result[0]; - const bufferNormals = result[1]; - const bufferTriangles = result[2]; - const vertexRegionBuffer = VSI_createColorBufferForSphere(i, bufferVertices.numItems * 3); - measurePointsBuffers[i] = [bufferVertices, bufferNormals, bufferTriangles, vertexRegionBuffer]; + let energyIndex = 0; + for (let i = 0; i < NO_OF_MEASURE_POINTS; i++) { + if (tsView.channels().includes(i)) { + const result = HLPR_sphereBufferAtPoint(gl, measurePoints[i], timeselection_energy[energyIndex][currentTimeValue], 12, 12); + energyIndex++; + const 
bufferVertices = result[0]; + const bufferNormals = result[1]; + const bufferTriangles = result[2]; + const vertexRegionBuffer = VSI_createColorBufferForSphere(i, bufferVertices.numItems * 3); + measurePointsBuffers[i] = [bufferVertices, bufferNormals, bufferTriangles, vertexRegionBuffer]; + } + else { + const result = HLPR_sphereBufferAtPoint(gl, measurePoints[i], 3, 12, 12); + const bufferVertices = result[0]; + const bufferNormals = result[1]; + const bufferTriangles = result[2]; + const vertexRegionBuffer = VSI_createColorBufferForSphere(i, bufferVertices.numItems * 3); + measurePointsBuffers[i] = [bufferVertices, bufferNormals, bufferTriangles, vertexRegionBuffer]; + } } } diff --git a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js index 63a5dc1fe..a76040a46 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js +++ b/tvb/interfaces/web/templates/genshi/visualizers/commons/scripts/virtualBrain.js @@ -380,7 +380,7 @@ function _VS_init_cubicalMeasurePoints() { function _VS_init_sphereMeasurePoints() { for (let i = 0; i < NO_OF_MEASURE_POINTS; i++) { - const result = HLPR_sphereBufferAtPoint(gl, measurePoints[i], 1);//3 for the default radius value now, we will modify it later + const result = HLPR_sphereBufferAtPoint(gl, measurePoints[i], 2);//2 for the default radius value now, we will modify it later const bufferVertices = result[0]; const bufferNormals = result[1]; const bufferTriangles = result[2]; @@ -1428,13 +1428,25 @@ function readFileData(fileUrl, async, callIdentifier) { /////////////////////////////////////// ~~~~~~~~~~ START ENERGY RELATED METHOD ~~~~~~~~~~~~~ ////////////////////////////////// //init spheres with energy controlling the radius function changeSphereMeasurePoints_energy() { - for (let i = 0; i < NO_OF_MEASURE_POINTS; i++) { - const result = HLPR_sphereBufferAtPoint(gl, 
measurePoints[i], timeselection_energy[i][currentTimeValue]);//3 for the default radius value now, we will modify it later - const bufferVertices = result[0]; - const bufferNormals = result[1]; - const bufferTriangles = result[2]; - const bufferColor = createColorBufferForCube(false); - measurePointsBuffers[i] = [bufferVertices, bufferNormals, bufferTriangles, bufferColor]; + let energyIndex = 0; + for (let i = 0; i < NO_OF_MEASURE_POINTS; i++) { + if (tsView.channels().includes(i)) { + const result = HLPR_sphereBufferAtPoint(gl, measurePoints[i], timeselection_energy[energyIndex][currentTimeValue]); + energyIndex++; + const bufferVertices = result[0]; + const bufferNormals = result[1]; + const bufferTriangles = result[2]; + const bufferColor = createColorBufferForCube(false); + measurePointsBuffers[i] = [bufferVertices, bufferNormals, bufferTriangles, bufferColor]; + } + else { + const result = HLPR_sphereBufferAtPoint(gl, measurePoints[i], 2); + const bufferVertices = result[0]; + const bufferNormals = result[1]; + const bufferTriangles = result[2]; + const bufferColor = createColorBufferForCube(false); + measurePointsBuffers[i] = [bufferVertices, bufferNormals, bufferTriangles, bufferColor]; + } } } From 635cb3899e1d25ad377a09ac8b4cee8bd0ed99ca Mon Sep 17 00:00:00 2001 From: kimonoki Date: Mon, 3 Sep 2018 16:30:44 +1000 Subject: [PATCH 52/53] TVB-2359 Fix rendering sequence when dragging the selection window --- tvb/interfaces/web/static/js/tvbviz.js | 29 +++++++++++++------------- 1 file changed, 14 insertions(+), 15 deletions(-) diff --git a/tvb/interfaces/web/static/js/tvbviz.js b/tvb/interfaces/web/static/js/tvbviz.js index 89e65aec6..b5f229a30 100644 --- a/tvb/interfaces/web/static/js/tvbviz.js +++ b/tvb/interfaces/web/static/js/tvbviz.js @@ -1320,11 +1320,20 @@ tv.plot = { $("#info-interval").html((timeselection[1] - timeselection[0]).toFixed(2) + "ms"); if (triggered_by_timeselection) { - var timeselection_lasttime=timeselection_interval_length; + 
//update the time in the input tag + var time_index = parseInt((timeselection[0] - f.t0()) / f.dt()); + triggered_by_changeinput = true; + $('#TimeNow').val(timeselection[0].toFixed(2)); + $('#slider').slider('value', time_index); + triggered_by_changeinput = false; + //update 3D viewer's time + if (f.viewer_type() === 'dualbrain') { + loadFromTimeStep(time_index); + } + var timeselection_lasttime = timeselection_interval_length; timeselection_interval = timeselection[1] - timeselection[0]; timeselection_interval_length = parseInt(timeselection_interval / f.dt()) - 1; - //retrieve energy for the whole timeline rather than a slice var all_slice = f.current_slice(); all_slice[0].di = f.shape()[1]; @@ -1332,11 +1341,11 @@ tv.plot = { all_slice[0].lo = 0; //call the energy computation method and block until get the enery data if channel or time range is changed - if(timeselection_lasttime!=timeselection_interval_length||f.channel_lasttime!==f.channels()){ + if (timeselection_lasttime != timeselection_interval_length || f.channel_lasttime !== f.channels()) { showBlockerOverlay(50000); f.update_energy(); } - else if(timeselection_lasttime===timeselection_interval_length&×election_interval_length!=0){ + else if (timeselection_lasttime === timeselection_interval_length && timeselection_interval_length != 0) { if (isInternalSensorView) { VSI_change_energySphericalMeasurePoints() } @@ -1344,17 +1353,7 @@ tv.plot = { changeSphereMeasurePoints_energy(); } } - f.channel_lasttime=f.channels(); - - //update the time in the input tag - var time_index = parseInt((timeselection[0] - f.t0()) / f.dt()); - triggered_by_changeinput = true; - $('#TimeNow').val(timeselection[0].toFixed(2)); - $('#slider').slider('value', time_index); - triggered_by_changeinput = false; - if(f.viewer_type()==='dualbrain') { - loadFromTimeStep(time_index); - } + f.channel_lasttime = f.channels(); } }; From db800b264c2e321860dd57c05ce97ea581e6fec4 Mon Sep 17 00:00:00 2001 From: kimonoki Date: Wed, 26 Sep 
2018 15:54:02 +1000 Subject: [PATCH 53/53] TVB-2359 Remove d3v4 dependency see https://req.thevirtualbrain.org/browse/TVB-2359 --- tvb/interfaces/web/static/js/d3.v4.min.js | 2 -- tvb/interfaces/web/static/style/subsection_timeseries.css | 8 ++++---- .../genshi/visualizers/connectivity_edge_bundle/view.html | 2 +- .../genshi/visualizers/pearson_edge_bundle/view.html | 2 +- 4 files changed, 6 insertions(+), 8 deletions(-) delete mode 100644 tvb/interfaces/web/static/js/d3.v4.min.js diff --git a/tvb/interfaces/web/static/js/d3.v4.min.js b/tvb/interfaces/web/static/js/d3.v4.min.js deleted file mode 100644 index 6a2705865..000000000 --- a/tvb/interfaces/web/static/js/d3.v4.min.js +++ /dev/null @@ -1,2 +0,0 @@ -// https://d3js.org Version 4.10.0. Copyright 2017 Mike Bostock. -(function(t,n){"object"==typeof exports&&"undefined"!=typeof module?n(exports):"function"==typeof define&&define.amd?define(["exports"],n):n(t.d3=t.d3||{})})(this,function(t){"use strict";function n(t){return function(n,e){return ss(t(n),e)}}function e(t,n){return[t,n]}function r(t,n,e){var r=(n-t)/Math.max(0,e),i=Math.floor(Math.log(r)/Math.LN10),o=r/Math.pow(10,i);return i>=0?(o>=Ts?10:o>=ks?5:o>=Ns?2:1)*Math.pow(10,i):-Math.pow(10,-i)/(o>=Ts?10:o>=ks?5:o>=Ns?2:1)}function i(t,n,e){var r=Math.abs(n-t)/Math.max(0,e),i=Math.pow(10,Math.floor(Math.log(r)/Math.LN10)),o=r/i;return o>=Ts?i*=10:o>=ks?i*=5:o>=Ns&&(i*=2),n=0&&(e=t.slice(r+1),t=t.slice(0,r)),t&&!n.hasOwnProperty(t))throw new Error("unknown type: "+t);return{type:t,name:e}})}function v(t,n){for(var e,r=0,i=t.length;r=0&&(n=t.slice(e+1),t=t.slice(0,e)),{type:t,name:n}})}function T(t){return function(){var n=this.__on;if(n){for(var e,r=0,i=-1,o=n.length;rn?1:t>=n?0:NaN}function R(t){return function(){this.removeAttribute(t)}}function L(t){return function(){this.removeAttributeNS(t.space,t.local)}}function q(t,n){return function(){this.setAttribute(t,n)}}function U(t,n){return function(){this.setAttributeNS(t.space,t.local,n)}}function 
D(t,n){return function(){var e=n.apply(this,arguments);null==e?this.removeAttribute(t):this.setAttribute(t,e)}}function O(t,n){return function(){var e=n.apply(this,arguments);null==e?this.removeAttributeNS(t.space,t.local):this.setAttributeNS(t.space,t.local,e)}}function F(t){return function(){this.style.removeProperty(t)}}function I(t,n,e){return function(){this.style.setProperty(t,n,e)}}function Y(t,n,e){return function(){var r=n.apply(this,arguments);null==r?this.style.removeProperty(t):this.style.setProperty(t,r,e)}}function B(t,n){return t.style.getPropertyValue(n)||uf(t).getComputedStyle(t,null).getPropertyValue(n)}function j(t){return function(){delete this[t]}}function H(t,n){return function(){this[t]=n}}function X(t,n){return function(){var e=n.apply(this,arguments);null==e?delete this[t]:this[t]=e}}function $(t){return t.trim().split(/^|\s+/)}function V(t){return t.classList||new W(t)}function W(t){this._node=t,this._names=$(t.getAttribute("class")||"")}function Z(t,n){for(var e=V(t),r=-1,i=n.length;++r>8&15|n>>4&240,n>>4&15|240&n,(15&n)<<4|15&n,1)):(n=gf.exec(t))?kt(parseInt(n[1],16)):(n=mf.exec(t))?new At(n[1],n[2],n[3],1):(n=xf.exec(t))?new At(255*n[1]/100,255*n[2]/100,255*n[3]/100,1):(n=bf.exec(t))?Nt(n[1],n[2],n[3],n[4]):(n=wf.exec(t))?Nt(255*n[1]/100,255*n[2]/100,255*n[3]/100,n[4]):(n=Mf.exec(t))?Ct(n[1],n[2]/100,n[3]/100,1):(n=Tf.exec(t))?Ct(n[1],n[2]/100,n[3]/100,n[4]):kf.hasOwnProperty(t)?kt(kf[t]):"transparent"===t?new At(NaN,NaN,NaN,0):null}function kt(t){return new At(t>>16&255,t>>8&255,255&t,1)}function Nt(t,n,e,r){return r<=0&&(t=n=e=NaN),new At(t,n,e,r)}function St(t){return t instanceof Mt||(t=Tt(t)),t?(t=t.rgb(),new At(t.r,t.g,t.b,t.opacity)):new At}function Et(t,n,e,r){return 1===arguments.length?St(t):new At(t,n,e,null==r?1:r)}function At(t,n,e,r){this.r=+t,this.g=+n,this.b=+e,this.opacity=+r}function Ct(t,n,e,r){return r<=0?t=n=e=NaN:e<=0||e>=1?t=n=NaN:n<=0&&(t=NaN),new Rt(t,n,e,r)}function zt(t){if(t instanceof Rt)return new 
Rt(t.h,t.s,t.l,t.opacity);if(t instanceof Mt||(t=Tt(t)),!t)return new Rt;if(t instanceof Rt)return t;var n=(t=t.rgb()).r/255,e=t.g/255,r=t.b/255,i=Math.min(n,e,r),o=Math.max(n,e,r),u=NaN,a=o-i,c=(o+i)/2;return a?(u=n===o?(e-r)/a+6*(e0&&c<1?0:u,new Rt(u,a,c,t.opacity)}function Pt(t,n,e,r){return 1===arguments.length?zt(t):new Rt(t,n,e,null==r?1:r)}function Rt(t,n,e,r){this.h=+t,this.s=+n,this.l=+e,this.opacity=+r}function Lt(t,n,e){return 255*(t<60?n+(e-n)*t/60:t<180?e:t<240?n+(e-n)*(240-t)/60:n)}function qt(t){if(t instanceof Dt)return new Dt(t.l,t.a,t.b,t.opacity);if(t instanceof Ht){var n=t.h*Nf;return new Dt(t.l,Math.cos(n)*t.c,Math.sin(n)*t.c,t.opacity)}t instanceof At||(t=St(t));var e=Yt(t.r),r=Yt(t.g),i=Yt(t.b),o=Ot((.4124564*e+.3575761*r+.1804375*i)/Ef),u=Ot((.2126729*e+.7151522*r+.072175*i)/Af);return new Dt(116*u-16,500*(o-u),200*(u-Ot((.0193339*e+.119192*r+.9503041*i)/Cf)),t.opacity)}function Ut(t,n,e,r){return 1===arguments.length?qt(t):new Dt(t,n,e,null==r?1:r)}function Dt(t,n,e,r){this.l=+t,this.a=+n,this.b=+e,this.opacity=+r}function Ot(t){return t>Lf?Math.pow(t,1/3):t/Rf+zf}function Ft(t){return t>Pf?t*t*t:Rf*(t-zf)}function It(t){return 255*(t<=.0031308?12.92*t:1.055*Math.pow(t,1/2.4)-.055)}function Yt(t){return(t/=255)<=.04045?t/12.92:Math.pow((t+.055)/1.055,2.4)}function Bt(t){if(t instanceof Ht)return new Ht(t.h,t.c,t.l,t.opacity);t instanceof Dt||(t=qt(t));var n=Math.atan2(t.b,t.a)*Sf;return new Ht(n<0?n+360:n,Math.sqrt(t.a*t.a+t.b*t.b),t.l,t.opacity)}function jt(t,n,e,r){return 1===arguments.length?Bt(t):new Ht(t,n,e,null==r?1:r)}function Ht(t,n,e,r){this.h=+t,this.c=+n,this.l=+e,this.opacity=+r}function Xt(t){if(t instanceof Vt)return new Vt(t.h,t.s,t.l,t.opacity);t instanceof At||(t=St(t));var n=t.r/255,e=t.g/255,r=t.b/255,i=(Bf*r+If*n-Yf*e)/(Bf+If-Yf),o=r-i,u=(Ff*(e-i)-Df*o)/Of,a=Math.sqrt(u*u+o*o)/(Ff*i*(1-i)),c=a?Math.atan2(u,o)*Sf-120:NaN;return new Vt(c<0?c+360:c,a,i,t.opacity)}function $t(t,n,e,r){return 1===arguments.length?Xt(t):new 
Vt(t,n,e,null==r?1:r)}function Vt(t,n,e,r){this.h=+t,this.s=+n,this.l=+e,this.opacity=+r}function Wt(t,n,e,r,i){var o=t*t,u=o*t;return((1-3*t+3*o-u)*n+(4-6*o+3*u)*e+(1+3*t+3*o-3*u)*r+u*i)/6}function Zt(t,n){return function(e){return t+e*n}}function Gt(t,n,e){return t=Math.pow(t,e),n=Math.pow(n,e)-t,e=1/e,function(r){return Math.pow(t+r*n,e)}}function Jt(t,n){var e=n-t;return e?Zt(t,e>180||e<-180?e-360*Math.round(e/360):e):Jf(isNaN(t)?n:t)}function Qt(t){return 1==(t=+t)?Kt:function(n,e){return e-n?Gt(n,e,t):Jf(isNaN(n)?e:n)}}function Kt(t,n){var e=n-t;return e?Zt(t,e):Jf(isNaN(t)?n:t)}function tn(t){return function(n){var e,r,i=n.length,o=new Array(i),u=new Array(i),a=new Array(i);for(e=0;e180?n+=360:n-t>180&&(t+=360),o.push({i:e.push(i(e)+"rotate(",null,r)-2,x:rl(t,n)})):n&&e.push(i(e)+"rotate("+n+r)}function a(t,n,e,o){t!==n?o.push({i:e.push(i(e)+"skewX(",null,r)-2,x:rl(t,n)}):n&&e.push(i(e)+"skewX("+n+r)}function c(t,n,e,r,o,u){if(t!==e||n!==r){var a=o.push(i(o)+"scale(",null,",",null,")");u.push({i:a-4,x:rl(t,e)},{i:a-2,x:rl(n,r)})}else 1===e&&1===r||o.push(i(o)+"scale("+e+","+r+")")}return function(n,e){var r=[],i=[];return n=t(n),e=t(e),o(n.translateX,n.translateY,e.translateX,e.translateY,r,i),u(n.rotate,e.rotate,r,i),a(n.skewX,e.skewX,r,i),c(n.scaleX,n.scaleY,e.scaleX,e.scaleY,r,i),n=e=null,function(t){for(var n,e=-1,o=i.length;++e=0&&n._call.call(null,t),n=n._next;--Ml}function _n(){El=(Sl=Cl.now())+Al,Ml=Tl=0;try{vn()}finally{Ml=0,gn(),El=0}}function yn(){var t=Cl.now(),n=t-Sl;n>Nl&&(Al-=n,Sl=t)}function gn(){for(var t,n,e=Vf,r=1/0;e;)e._call?(r>e._time&&(r=e._time),t=e,e=e._next):(n=e._next,e._next=null,e=t?t._next=n:Vf=n);Wf=t,mn(r)}function mn(t){if(!Ml){Tl&&(Tl=clearTimeout(Tl));var n=t-El;n>24?(t<1/0&&(Tl=setTimeout(_n,n)),kl&&(kl=clearInterval(kl))):(kl||(Sl=El,kl=setInterval(yn,Nl)),Ml=1,zl(_n))}}function xn(t,n){var e=t.__transition;if(!e||!(e=e[n])||e.state>ql)throw new Error("too late");return e}function bn(t,n){var 
e=t.__transition;if(!e||!(e=e[n])||e.state>Dl)throw new Error("too late");return e}function wn(t,n){var e=t.__transition;if(!e||!(e=e[n]))throw new Error("too late");return e}function Mn(t,n,e){function r(c){var s,f,l,h;if(e.state!==Ul)return o();for(s in a)if((h=a[s]).name===e.name){if(h.state===Ol)return Pl(r);h.state===Fl?(h.state=Yl,h.timer.stop(),h.on.call("interrupt",t,t.__data__,h.index,h.group),delete a[s]):+s=0&&(t=t.slice(0,n)),!t||"start"===t})}function Yn(t,n,e){var r,i,o=In(n)?xn:bn;return function(){var u=o(this,t),a=u.on;a!==r&&(i=(r=a).copy()).on(n,e),u.on=i}}function Bn(t){return function(){var n=this.parentNode;for(var e in this.__transition)if(+e!==t)return;n&&n.removeChild(this)}}function jn(t,n){var e,r,i;return function(){var o=B(this,t),u=(this.style.removeProperty(t),B(this,t));return o===u?null:o===e&&u===r?i:i=n(e=o,r=u)}}function Hn(t){return function(){this.style.removeProperty(t)}}function Xn(t,n,e){var r,i;return function(){var o=B(this,t);return o===e?null:o===r?i:i=n(r=o,e)}}function $n(t,n,e){var r,i,o;return function(){var u=B(this,t),a=e(this);return null==a&&(this.style.removeProperty(t),a=B(this,t)),u===a?null:u===r&&a===i?o:o=n(r=u,i=a)}}function Vn(t,n,e){function r(){var r=this,i=n.apply(r,arguments);return i&&function(n){r.style.setProperty(t,i(n),e)}}return r._value=n,r}function Wn(t){return function(){this.textContent=t}}function Zn(t){return function(){var n=t(this);this.textContent=null==n?"":n}}function Gn(t,n,e,r){this._groups=t,this._parents=n,this._name=e,this._id=r}function Jn(t){return dt().transition(t)}function Qn(){return++$l}function Kn(t){return((t*=2)<=1?t*t:--t*(2-t)+1)/2}function te(t){return((t*=2)<=1?t*t*t:(t-=2)*t*t+2)/2}function ne(t){return(1-Math.cos(Jl*t))/2}function ee(t){return((t*=2)<=1?Math.pow(2,10*t-10):2-Math.pow(2,10-10*t))/2}function re(t){return((t*=2)<=1?1-Math.sqrt(1-t*t):Math.sqrt(1-(t-=2)*t)+1)/2}function ie(t){return(t=+t)Math.abs(t[1]-U[1])?b=!0:x=!0),U=t,m=!0,xh(),o()}function 
o(){var t;switch(y=U[0]-q[0],g=U[1]-q[1],T){case wh:case bh:k&&(y=Math.max(C-a,Math.min(P-p,y)),s=a+y,d=p+y),N&&(g=Math.max(z-l,Math.min(R-v,g)),h=l+g,_=v+g);break;case Mh:k<0?(y=Math.max(C-a,Math.min(P-a,y)),s=a+y,d=p):k>0&&(y=Math.max(C-p,Math.min(P-p,y)),s=a,d=p+y),N<0?(g=Math.max(z-l,Math.min(R-l,g)),h=l+g,_=v):N>0&&(g=Math.max(z-v,Math.min(R-v,g)),h=l,_=v+g);break;case Th:k&&(s=Math.max(C,Math.min(P,a-y*k)),d=Math.max(C,Math.min(P,p+y*k))),N&&(h=Math.max(z,Math.min(R,l-g*N)),_=Math.max(z,Math.min(R,v+g*N)))}d0&&(a=s-y),N<0?v=_-g:N>0&&(l=h-g),T=wh,F.attr("cursor",Eh.selection),o());break;default:return}xh()},!0).on("keyup.brush",function(){switch(t.event.keyCode){case 16:L&&(x=b=L=!1,o());break;case 18:T===Th&&(k<0?p=d:k>0&&(a=s),N<0?v=_:N>0&&(l=h),T=Mh,o());break;case 32:T===wh&&(t.event.altKey?(k&&(p=d-y*k,a=s+y*k),N&&(v=_-g*N,l=h+g*N),T=Th):(k<0?p=d:k>0&&(a=s),N<0?v=_:N>0&&(l=h),T=Mh),F.attr("cursor",Eh[M]),o());break;default:return}xh()},!0).on("mousemove.brush",e,!0).on("mouseup.brush",u,!0);lf(t.event.view)}ue(),jl(w),r.call(w),D.start()}}function a(){var t=this.__brush||{selection:null};return t.extent=s.apply(this,arguments),t.dim=n,t}var c,s=se,f=ce,l=h(e,"start","brush","end"),p=6;return e.move=function(t,e){t.selection?t.on("start.brush",function(){i(this,arguments).beforestart().start()}).on("interrupt.brush end.brush",function(){i(this,arguments).end()}).tween("brush",function(){function t(t){u.selection=1===t&&le(s)?null:f(t),r.call(o),a.brush()}var o=this,u=o.__brush,a=i(o,arguments),c=u.selection,s=n.input("function"==typeof e?e.apply(this,arguments):e,u.extent),f=cl(c,s);return c&&s?t:t(1)}):t.each(function(){var t=this,o=arguments,u=t.__brush,a=n.input("function"==typeof e?e.apply(t,o):e,u.extent),c=i(t,o).beforestart();jl(t),u.selection=null==a||le(a)?null:a,r.call(t),c.start().brush().end()})},o.prototype={beforestart:function(){return 1==++this.active&&(this.state.emitter=this,this.starting=!0),this},start:function(){return 
this.starting&&(this.starting=!1,this.emit("start")),this},brush:function(){return this.emit("brush"),this},end:function(){return 0==--this.active&&(delete this.state.emitter,this.emit("end")),this},emit:function(t){N(new mh(e,t,n.output(this.state.selection)),l.apply,l,[t,this.that,this.args])}},e.extent=function(t){return arguments.length?(s="function"==typeof t?t:gh([[+t[0][0],+t[0][1]],[+t[1][0],+t[1][1]]]),e):s},e.filter=function(t){return arguments.length?(f="function"==typeof t?t:gh(!!t),e):f},e.handleSize=function(t){return arguments.length?(p=+t,e):p},e.on=function(){var t=l.on.apply(l,arguments);return t===l?e:t},e}function pe(t){return function(n,e){return t(n.source.value+n.target.value,e.source.value+e.target.value)}}function de(){this._x0=this._y0=this._x1=this._y1=null,this._=""}function ve(){return new de}function _e(t){return t.source}function ye(t){return t.target}function ge(t){return t.radius}function me(t){return t.startAngle}function xe(t){return t.endAngle}function be(){}function we(t,n){var e=new be;if(t instanceof be)t.each(function(t,n){e.set(n,t)});else if(Array.isArray(t)){var r,i=-1,o=t.length;if(null==n)for(;++i=(o=(v+y)/2))?v=o:y=o,(f=e>=(u=(_+g)/2))?_=u:g=u,i=p,!(p=p[l=f<<1|s]))return i[l]=d,t;if(a=+t._x.call(null,p.data),c=+t._y.call(null,p.data),n===a&&e===c)return d.next=p,i?i[l]=d:t._root=d,t;do{i=i?i[l]=new Array(4):t._root=new Array(4),(s=n>=(o=(v+y)/2))?v=o:y=o,(f=e>=(u=(_+g)/2))?_=u:g=u}while((l=f<<1|s)==(h=(c>=u)<<1|a>=o));return i[h]=p,i[l]=d,t}function Re(t){return t[0]}function Le(t){return t[1]}function qe(t,n,e){var r=new Ue(null==n?Re:n,null==e?Le:e,NaN,NaN,NaN,NaN);return null==t?r:r.addAll(t)}function Ue(t,n,e,r,i,o){this._x=t,this._y=n,this._x0=e,this._y0=r,this._x1=i,this._y1=o,this._root=void 0}function De(t){for(var n={data:t.data},e=n;t=t.next;)e=e.next={data:t.data};return n}function Oe(t){return t.x+t.vx}function Fe(t){return t.y+t.vy}function Ie(t){return t.index}function Ye(t,n){var e=t.get(n);if(!e)throw 
new Error("missing: "+n);return e}function Be(t){return t.x}function je(t){return t.y}function He(t){return new Xe(t)}function Xe(t){if(!(n=vp.exec(t)))throw new Error("invalid format: "+t);var n,e=n[1]||" ",r=n[2]||">",i=n[3]||"-",o=n[4]||"",u=!!n[5],a=n[6]&&+n[6],c=!!n[7],s=n[8]&&+n[8].slice(1),f=n[9]||"";"n"===f?(c=!0,f="g"):dp[f]||(f=""),(u||"0"===e&&"="===r)&&(u=!0,e="0",r="="),this.fill=e,this.align=r,this.sign=i,this.symbol=o,this.zero=u,this.width=a,this.comma=c,this.precision=s,this.type=f}function $e(n){return _p=mp(n),t.format=_p.format,t.formatPrefix=_p.formatPrefix,_p}function Ve(){this.reset()}function We(t,n,e){var r=t.s=n+e,i=r-n,o=r-i;t.t=n-o+(e-i)}function Ze(t){return t>1?0:t<-1?rd:Math.acos(t)}function Ge(t){return t>1?id:t<-1?-id:Math.asin(t)}function Je(t){return(t=yd(t/2))*t}function Qe(){}function Ke(t,n){t&&wd.hasOwnProperty(t.type)&&wd[t.type](t,n)}function tr(t,n,e){var r,i=-1,o=t.length-e;for(n.lineStart();++i=0?1:-1,i=r*e,o=hd(n),u=yd(n),a=Ep*u,c=Sp*o+a*hd(i),s=a*r*yd(i);Td.add(ld(s,c)),Np=t,Sp=o,Ep=u}function ur(t){return[ld(t[1],t[0]),Ge(t[2])]}function ar(t){var n=t[0],e=t[1],r=hd(e);return[r*hd(n),r*yd(n),yd(e)]}function cr(t,n){return t[0]*n[0]+t[1]*n[1]+t[2]*n[2]}function sr(t,n){return[t[1]*n[2]-t[2]*n[1],t[2]*n[0]-t[0]*n[2],t[0]*n[1]-t[1]*n[0]]}function fr(t,n){t[0]+=n[0],t[1]+=n[1],t[2]+=n[2]}function lr(t,n){return[t[0]*n,t[1]*n,t[2]*n]}function hr(t){var n=md(t[0]*t[0]+t[1]*t[1]+t[2]*t[2]);t[0]/=n,t[1]/=n,t[2]/=n}function pr(t,n){Dp.push(Op=[Ap=t,zp=t]),nPp&&(Pp=n)}function dr(t,n){var e=ar([t*cd,n*cd]);if(Up){var r=sr(Up,e),i=sr([r[1],-r[0],0],r);hr(i),i=ur(i);var o,u=t-Rp,a=u>0?1:-1,c=i[0]*ad*a,s=sd(u)>180;s^(a*RpPp&&(Pp=o):(c=(c+360)%360-180,s^(a*RpPp&&(Pp=n))),s?txr(Ap,zp)&&(zp=t):xr(t,zp)>xr(Ap,zp)&&(Ap=t):zp>=Ap?(tzp&&(zp=t)):t>Rp?xr(Ap,t)>xr(Ap,zp)&&(zp=t):xr(t,zp)>xr(Ap,zp)&&(Ap=t)}else Dp.push(Op=[Ap=t,zp=t]);nPp&&(Pp=n),Up=e,Rp=t}function vr(){Ed.point=dr}function _r(){Op[0]=Ap,Op[1]=zp,Ed.point=pr,Up=null}function 
yr(t,n){if(Up){var e=t-Rp;Sd.add(sd(e)>180?e+(e>0?360:-360):e)}else Lp=t,qp=n;Nd.point(t,n),dr(t,n)}function gr(){Nd.lineStart()}function mr(){yr(Lp,qp),Nd.lineEnd(),sd(Sd)>ed&&(Ap=-(zp=180)),Op[0]=Ap,Op[1]=zp,Up=null}function xr(t,n){return(n-=t)<0?n+360:n}function br(t,n){return t[0]-n[0]}function wr(t,n){return t[0]<=t[1]?t[0]<=n&&n<=t[1]:nrd?t-ud:t<-rd?t+ud:t,n]}function Lr(t,n,e){return(t%=ud)?n||e?zd(Ur(t),Dr(n,e)):Ur(t):n||e?Dr(n,e):Rr}function qr(t){return function(n,e){return n+=t,[n>rd?n-ud:n<-rd?n+ud:n,e]}}function Ur(t){var n=qr(t);return n.invert=qr(-t),n}function Dr(t,n){function e(t,n){var e=hd(n),a=hd(t)*e,c=yd(t)*e,s=yd(n),f=s*r+a*i;return[ld(c*o-f*u,a*r-s*i),Ge(f*o+c*u)]}var r=hd(t),i=yd(t),o=hd(n),u=yd(n);return e.invert=function(t,n){var e=hd(n),a=hd(t)*e,c=yd(t)*e,s=yd(n),f=s*o-c*u;return[ld(c*o+s*u,a*r+f*i),Ge(f*r-a*i)]},e}function Or(t,n,e,r,i,o){if(e){var u=hd(n),a=yd(n),c=r*e;null==i?(i=n+r*ud,o=n-c/2):(i=Fr(u,i),o=Fr(u,o),(r>0?io)&&(i+=r*ud));for(var s,f=i;r>0?f>o:f0)do{s.point(0===f||3===f?t:e,f>1?r:n)}while((f=(f+a+4)%4)!==l);else s.point(o[0],o[1])}function u(r,i){return sd(r[0]-t)0?0:3:sd(r[0]-e)0?2:1:sd(r[1]-n)0?1:0:i>0?3:2}function a(t,n){return c(t.x,n.x)}function c(t,n){var e=u(t,1),r=u(n,1);return e!==r?e-r:0===e?n[1]-t[1]:1===e?t[0]-n[0]:2===e?t[1]-n[1]:n[0]-t[0]}return function(u){function c(t,n){i(t,n)&&w.point(t,n)}function s(){for(var n=0,e=0,i=h.length;er&&(l-o)*(r-u)>(p-u)*(t-o)&&++n:p<=r&&(l-o)*(r-u)<(p-u)*(t-o)&&--n;return n}function f(o,u){var a=i(o,u);if(h&&p.push([o,u]),x)d=o,v=u,_=a,x=!1,a&&(w.lineStart(),w.point(o,u));else if(a&&m)w.point(o,u);else{var c=[y=Math.max(Zd,Math.min(Wd,y)),g=Math.max(Zd,Math.min(Wd,g))],s=[o=Math.max(Zd,Math.min(Wd,o)),u=Math.max(Zd,Math.min(Wd,u))];Xd(c,s,t,n,e,r)?(m||(w.lineStart(),w.point(c[0],c[1])),w.point(s[0],s[1]),a||w.lineEnd(),b=!1):a&&(w.lineStart(),w.point(o,u),b=!1)}y=o,g=u,m=a}var 
l,h,p,d,v,_,y,g,m,x,b,w=u,M=Hd(),T={point:c,lineStart:function(){T.point=f,h&&h.push(p=[]),x=!0,m=!1,y=g=NaN},lineEnd:function(){l&&(f(d,v),_&&m&&M.rejoin(),l.push(M.result())),T.point=c,m&&w.lineEnd()},polygonStart:function(){w=M,l=[],h=[],b=!0},polygonEnd:function(){var t=s(),n=b&&t,e=(l=Cs(l)).length;(n||e)&&(u.polygonStart(),n&&(u.lineStart(),o(null,null,1,u),u.lineEnd()),e&&Vd(l,a,t,o,u),u.polygonEnd()),w=u,l=h=p=null}};return T}}function jr(){Kd.point=Kd.lineEnd=Qe}function Hr(t,n){Pd=t*=cd,Rd=yd(n*=cd),Ld=hd(n),Kd.point=Xr}function Xr(t,n){t*=cd;var e=yd(n*=cd),r=hd(n),i=sd(t-Pd),o=hd(i),u=r*yd(i),a=Ld*e-Rd*r*o,c=Rd*e+Ld*r*o;Qd.add(ld(md(u*u+a*a),c)),Pd=t,Rd=e,Ld=r}function $r(t,n){return!(!t||!ov.hasOwnProperty(t.type))&&ov[t.type](t,n)}function Vr(t,n){return 0===rv(t,n)}function Wr(t,n){var e=rv(t[0],t[1]);return rv(t[0],n)+rv(n,t[1])<=e+ed}function Zr(t,n){return!!Jd(t.map(Gr),Jr(n))}function Gr(t){return(t=t.map(Jr)).pop(),t}function Jr(t){return[t[0]*cd,t[1]*cd]}function Qr(t,n,e){var r=Ms(t,n-ed,e).concat(n);return function(t){return r.map(function(n){return[t,n]})}}function Kr(t,n,e){var r=Ms(t,n-ed,e).concat(n);return function(t){return r.map(function(n){return[n,t]})}}function ti(){function t(){return{type:"MultiLineString",coordinates:n()}}function n(){return Ms(pd(o/_)*_,i,_).map(h).concat(Ms(pd(s/y)*y,c,y).map(p)).concat(Ms(pd(r/d)*d,e,d).filter(function(t){return sd(t%_)>ed}).map(f)).concat(Ms(pd(a/v)*v,u,v).filter(function(t){return sd(t%y)>ed}).map(l))}var e,r,i,o,u,a,c,s,f,l,h,p,d=10,v=d,_=90,y=360,g=2.5;return t.lines=function(){return n().map(function(t){return{type:"LineString",coordinates:t}})},t.outline=function(){return{type:"Polygon",coordinates:[h(o).concat(p(c).slice(1),h(i).reverse().slice(1),p(s).reverse().slice(1))]}},t.extent=function(n){return arguments.length?t.extentMajor(n).extentMinor(n):t.extentMinor()},t.extentMajor=function(n){return 
arguments.length?(o=+n[0][0],i=+n[1][0],s=+n[0][1],c=+n[1][1],o>i&&(n=o,o=i,i=n),s>c&&(n=s,s=c,c=n),t.precision(g)):[[o,s],[i,c]]},t.extentMinor=function(n){return arguments.length?(r=+n[0][0],e=+n[1][0],a=+n[0][1],u=+n[1][1],r>e&&(n=r,r=e,e=n),a>u&&(n=a,a=u,u=n),t.precision(g)):[[r,a],[e,u]]},t.step=function(n){return arguments.length?t.stepMajor(n).stepMinor(n):t.stepMinor()},t.stepMajor=function(n){return arguments.length?(_=+n[0],y=+n[1],t):[_,y]},t.stepMinor=function(n){return arguments.length?(d=+n[0],v=+n[1],t):[d,v]},t.precision=function(n){return arguments.length?(g=+n,f=Qr(a,u,90),l=Kr(r,e,g),h=Qr(s,c,90),p=Kr(o,i,g),t):g},t.extentMajor([[-180,-90+ed],[180,90-ed]]).extentMinor([[-180,-80-ed],[180,80+ed]])}function ni(){sv.point=ei}function ei(t,n){sv.point=ri,qd=Dd=t,Ud=Od=n}function ri(t,n){cv.add(Od*t-Dd*n),Dd=t,Od=n}function ii(){ri(qd,Ud)}function oi(t,n){vv+=t,_v+=n,++yv}function ui(){Tv.point=ai}function ai(t,n){Tv.point=ci,oi(Yd=t,Bd=n)}function ci(t,n){var e=t-Yd,r=n-Bd,i=md(e*e+r*r);gv+=i*(Yd+t)/2,mv+=i*(Bd+n)/2,xv+=i,oi(Yd=t,Bd=n)}function si(){Tv.point=oi}function fi(){Tv.point=hi}function li(){pi(Fd,Id)}function hi(t,n){Tv.point=pi,oi(Fd=Yd=t,Id=Bd=n)}function pi(t,n){var e=t-Yd,r=n-Bd,i=md(e*e+r*r);gv+=i*(Yd+t)/2,mv+=i*(Bd+n)/2,xv+=i,bv+=(i=Bd*t-Yd*n)*(Yd+t),wv+=i*(Bd+n),Mv+=3*i,oi(Yd=t,Bd=n)}function di(t){this._context=t}function vi(t,n){zv.point=_i,Nv=Ev=t,Sv=Av=n}function _i(t,n){Ev-=t,Av-=n,Cv.add(md(Ev*Ev+Av*Av)),Ev=t,Av=n}function yi(){this._string=[]}function gi(t){return"m0,"+t+"a"+t+","+t+" 0 1,1 0,"+-2*t+"a"+t+","+t+" 0 1,1 0,"+2*t+"z"}function mi(t){return t.length>1}function xi(t,n){return((t=t.x)[0]<0?t[1]-id-ed:id-t[1])-((n=n.x)[0]<0?n[1]-id-ed:id-n[1])}function bi(t,n,e,r){var i,o,u=yd(t-e);return sd(u)>ed?fd((yd(n)*(o=hd(r))*yd(e)-yd(r)*(i=hd(n))*yd(t))/(i*o*u)):(n+r)/2}function wi(t){return function(n){var e=new Mi;for(var r in t)e[r]=t[r];return e.stream=n,e}}function Mi(){}function Ti(t,n,e){var 
r=n[1][0]-n[0][0],i=n[1][1]-n[0][1],o=t.clipExtent&&t.clipExtent();t.scale(150).translate([0,0]),null!=o&&t.clipExtent(null),Md(e,t.stream(dv));var u=dv.result(),a=Math.min(r/(u[1][0]-u[0][0]),i/(u[1][1]-u[0][1])),c=+n[0][0]+(r-a*(u[1][0]+u[0][0]))/2,s=+n[0][1]+(i-a*(u[1][1]+u[0][1]))/2;return null!=o&&t.clipExtent(o),t.scale(150*a).translate([c,s])}function ki(t,n,e){return Ti(t,[[0,0],n],e)}function Ni(t){return wi({point:function(n,e){n=t(n,e),this.stream.point(n[0],n[1])}})}function Si(t,n){function e(r,i,o,u,a,c,s,f,l,h,p,d,v,_){var y=s-r,g=f-i,m=y*y+g*g;if(m>4*n&&v--){var x=u+h,b=a+p,w=c+d,M=md(x*x+b*b+w*w),T=Ge(w/=M),k=sd(sd(w)-1)n||sd((y*A+g*C)/m-.5)>.3||u*h+a*p+c*d2?t[2]%360*cd:0,i()):[b*ad,w*ad,M*ad]},n.precision=function(t){return arguments.length?(A=Dv(r,E=t*t),o()):md(E)},n.fitExtent=function(t,e){return Ti(n,t,e)},n.fitSize=function(t,e){return ki(n,t,e)},function(){return u=t.apply(this,arguments),n.invert=u.invert&&e,i()}}function Ci(t){var n=0,e=rd/3,r=Ai(t),i=r(n,e);return i.parallels=function(t){return arguments.length?r(n=t[0]*cd,e=t[1]*cd):[n*ad,e*ad]},i}function zi(t){function n(t,n){return[t*e,yd(n)/e]}var e=hd(t);return n.invert=function(t,n){return[t/e,Ge(n*e)]},n}function Pi(t,n){function e(t,n){var e=md(o-2*i*yd(n))/i;return[e*yd(t*=i),u-e*hd(t)]}var r=yd(t),i=(r+yd(n))/2;if(sd(i)0?n<-id+ed&&(n=-id+ed):n>id-ed&&(n=id-ed);var e=o/_d(Oi(n),i);return[e*yd(i*t),o-e*hd(i*t)]}var r=hd(t),i=t===n?yd(t):vd(r/hd(n))/vd(Oi(n)/Oi(t)),o=r*_d(Oi(t),i)/i;return i?(e.invert=function(t,n){var e=o-n,r=gd(i)*md(t*t+e*e);return[ld(t,sd(e))/i*gd(e),2*fd(_d(o/r,1/i))-id]},e):Ui}function Ii(t,n){return[t,n]}function Yi(t,n){function e(t,n){var e=o-n,r=i*t;return[e*yd(r),o-e*hd(r)]}var r=hd(t),i=t===n?yd(t):(r-hd(n))/(n-t),o=r/i+t;return sd(i)=0;)n+=e[r].value;else n=1;t.value=n}function no(t,n){if(t===n)return t;var e=t.ancestors(),r=n.ancestors(),i=null;for(t=e.pop(),n=r.pop();t===n;)i=t,t=e.pop(),n=r.pop();return i}function eo(t,n){var e,r,i,o,u,a=new 
uo(t),c=+t.value&&(a.value=t.value),s=[a];for(null==n&&(n=ro);e=s.pop();)if(c&&(e.value=+e.data.value),(i=n(e.data))&&(u=i.length))for(e.children=new Array(u),o=u-1;o>=0;--o)s.push(r=e.children[o]=new uo(i[o])),r.parent=e,r.depth=e.depth+1;return a.eachBefore(oo)}function ro(t){return t.children}function io(t){t.data=t.data.data}function oo(t){var n=0;do{t.height=n}while((t=t.parent)&&t.height<++n)}function uo(t){this.data=t,this.depth=this.height=0,this.parent=null}function ao(t){for(var n,e,r=t.length;r;)e=Math.random()*r--|0,n=t[r],t[r]=t[e],t[e]=n;return t}function co(t,n){var e,r;if(lo(n,t))return[n];for(e=0;e0&&e*e>r*r+i*i}function lo(t,n){for(var e=0;ee*e+r*r}function mo(t){var n=t._,e=t.next._,r=n.r+e.r,i=(n.x*e.r+e.x*n.r)/r,o=(n.y*e.r+e.y*n.r)/r;return i*i+o*o}function xo(t){this._=t,this.next=null,this.previous=null}function bo(t){if(!(i=t.length))return 0;var n,e,r,i,o,u,a,c,s,f,l;if(n=t[0],n.x=0,n.y=0,!(i>1))return n.r;if(e=t[1],n.x=-e.r,e.x=n.r,e.y=0,!(i>2))return n.r+e.r;yo(e,n,r=t[2]),n=new xo(n),e=new xo(e),r=new xo(r),n.next=r.previous=e,e.next=n.previous=r,r.next=e.previous=n;t:for(a=3;a=0;)(n=i[o]).z+=e,n.m+=e,e+=n.s+(r+=n.c)}function Uo(t,n,e){return t.a.parent===n.parent?t.a:e}function Do(t,n){this._=t,this.parent=null,this.children=null,this.A=null,this.a=this,this.z=0,this.m=0,this.c=0,this.s=0,this.t=null,this.i=n}function Oo(t){for(var n,e,r,i,o,u=new Do(t,0),a=[u];n=a.pop();)if(r=n._.children)for(n.children=new Array(o=r.length),i=o-1;i>=0;--i)a.push(e=n.children[i]=new Do(r[i],i)),e.parent=n;return(u.parent=new Do(null,0)).children=[u],u}function Fo(t,n,e,r,i,o){for(var u,a,c,s,f,l,h,p,d,v,_,y=[],g=n.children,m=0,x=0,b=g.length,w=n.value;mh&&(h=a),_=f*f*v,(p=Math.max(h/_,_/l))>d){f-=a;break}d=p}y.push(u={value:f,dice:c1&&n_(t[e[r-2]],t[e[r-1]],t[i])<=0;)--r;e[r++]=i}return e.slice(0,r)}function Bo(t){this._size=t,this._call=this._error=null,this._tasks=[],this._data=[],this._waiting=this._active=this._ended=this._start=0}function 
jo(t){if(!t._start)try{Ho(t)}catch(n){if(t._tasks[t._ended+t._active-1])$o(t,n);else if(!t._data)throw n}}function Ho(t){for(;t._start=t._waiting&&t._active=0;)if((e=t._tasks[r])&&(t._tasks[r]=null,e.abort))try{e.abort()}catch(n){}t._active=NaN,Vo(t)}function Vo(t){if(!t._active&&t._call){var n=t._data;t._data=void 0,t._call(t._error,n)}}function Wo(t){if(null==t)t=1/0;else if(!((t=+t)>=1))throw new Error("invalid concurrency");return new Bo(t)}function Zo(t){return function(n,e){t(null==n?e:null)}}function Go(t){var n=t.responseType;return n&&"text"!==n?t.response:t.responseText}function Jo(t,n){return function(e){return t(e.responseText,n)}}function Qo(t){function n(n){var o=n+"",u=e.get(o);if(!u){if(i!==M_)return i;e.set(o,u=r.push(n))}return t[(u-1)%t.length]}var e=we(),r=[],i=M_;return t=null==t?[]:w_.call(t),n.domain=function(t){if(!arguments.length)return r.slice();r=[],e=we();for(var i,o,u=-1,a=t.length;++u=e?1:r(t)}}}function ru(t){return function(n,e){var r=t(n=+n,e=+e);return function(t){return t<=0?n:t>=1?e:r(t)}}}function iu(t,n,e,r){var i=t[0],o=t[1],u=n[0],a=n[1];return o2?ou:iu,o=u=null,r}function r(n){return(o||(o=i(a,c,f?eu(t):t,s)))(+n)}var i,o,u,a=N_,c=N_,s=cl,f=!1;return r.invert=function(t){return(u||(u=i(c,a,nu,f?ru(n):n)))(+t)},r.domain=function(t){return arguments.length?(a=b_.call(t,k_),e()):a.slice()},r.range=function(t){return arguments.length?(c=w_.call(t),e()):c.slice()},r.rangeRound=function(t){return c=w_.call(t),s=sl,e()},r.clamp=function(t){return arguments.length?(f=!!t,e()):f},r.interpolate=function(t){return arguments.length?(s=t,e()):s},e()}function cu(t){var n=t.domain;return t.ticks=function(t){var e=n();return Ss(e[0],e[e.length-1],null==t?10:t)},t.tickFormat=function(t,e){return S_(n(),t,e)},t.nice=function(e){null==e&&(e=10);var i,o=n(),u=0,a=o.length-1,c=o[u],s=o[a];return 
s0?i=r(c=Math.floor(c/i)*i,s=Math.ceil(s/i)*i,e):i<0&&(i=r(c=Math.ceil(c*i)/i,s=Math.floor(s*i)/i,e)),i>0?(o[u]=Math.floor(c/i)*i,o[a]=Math.ceil(s/i)*i,n(o)):i<0&&(o[u]=Math.ceil(c*i)/i,o[a]=Math.floor(s*i)/i,n(o)),t},t}function su(){var t=au(nu,rl);return t.copy=function(){return uu(t,su())},cu(t)}function fu(){function t(t){return+t}var n=[0,1];return t.invert=t,t.domain=t.range=function(e){return arguments.length?(n=b_.call(e,k_),t):n.slice()},t.copy=function(){return fu().domain(n)},cu(t)}function lu(t,n){return(n=Math.log(n/t))?function(e){return Math.log(e/t)/n}:T_(n)}function hu(t,n){return t<0?function(e){return-Math.pow(-n,e)*Math.pow(-t,1-e)}:function(e){return Math.pow(n,e)*Math.pow(t,1-e)}}function pu(t){return isFinite(t)?+("1e"+t):t<0?0:t}function du(t){return 10===t?pu:t===Math.E?Math.exp:function(n){return Math.pow(t,n)}}function vu(t){return t===Math.E?Math.log:10===t&&Math.log10||2===t&&Math.log2||(t=Math.log(t),function(n){return Math.log(n)/t})}function _u(t){return function(n){return-t(-n)}}function yu(){function n(){return o=vu(i),u=du(i),r()[0]<0&&(o=_u(o),u=_u(u)),e}var e=au(lu,hu).domain([1,10]),r=e.domain,i=10,o=vu(10),u=du(10);return e.base=function(t){return arguments.length?(i=+t,n()):i},e.domain=function(t){return arguments.length?(r(t),n()):r()},e.ticks=function(t){var n,e=r(),a=e[0],c=e[e.length-1];(n=c0){for(;hc)break;v.push(l)}}else for(;h=1;--f)if(!((l=s*f)c)break;v.push(l)}}else v=Ss(h,p,Math.min(p-h,d)).map(u);return n?v.reverse():v},e.tickFormat=function(n,r){if(null==r&&(r=10===i?".0e":","),"function"!=typeof r&&(r=t.format(r)),n===1/0)return r;null==n&&(n=10);var a=Math.max(1,i*n/e.ticks().length);return function(t){var n=t/u(Math.round(o(t)));return n*i0?i[n-1]:e[0],n=i?[o[i-1],r]:[o[n-1],o[n]]},t.copy=function(){return bu().domain([e,r]).range(u)},cu(t)}function wu(){function t(t){if(t<=t)return e[hs(n,t,0,r)]}var n=[.5],e=[0,1],r=1;return t.domain=function(i){return 
arguments.length?(n=w_.call(i),r=Math.min(n.length,e.length-1),t):n.slice()},t.range=function(i){return arguments.length?(e=w_.call(i),r=Math.min(n.length,e.length-1),t):e.slice()},t.invertExtent=function(t){var r=e.indexOf(t);return[n[r-1],n[r]]},t.copy=function(){return wu().domain(n).range(e)},t}function Mu(t,n,e,r){function i(n){return t(n=new Date(+n)),n}return i.floor=i,i.ceil=function(e){return t(e=new Date(e-1)),n(e,1),t(e),e},i.round=function(t){var n=i(t),e=i.ceil(t);return t-n0))return u;do{u.push(new Date(+e))}while(n(e,o),t(e),e=n)for(;t(n),!e(n);)n.setTime(n-1)},function(t,r){if(t>=t)if(r<0)for(;++r<=0;)for(;n(t,-1),!e(t););else for(;--r>=0;)for(;n(t,1),!e(t););})},e&&(i.count=function(n,r){return A_.setTime(+n),C_.setTime(+r),t(A_),t(C_),Math.floor(e(A_,C_))},i.every=function(t){return t=Math.floor(t),isFinite(t)&&t>0?t>1?i.filter(r?function(n){return r(n)%t==0}:function(n){return i.count(0,n)%t==0}):i:null}),i}function Tu(t){return Mu(function(n){n.setDate(n.getDate()-(n.getDay()+7-t)%7),n.setHours(0,0,0,0)},function(t,n){t.setDate(t.getDate()+7*n)},function(t,n){return(n-t-(n.getTimezoneOffset()-t.getTimezoneOffset())*R_)/L_})}function ku(t){return Mu(function(n){n.setUTCDate(n.getUTCDate()-(n.getUTCDay()+7-t)%7),n.setUTCHours(0,0,0,0)},function(t,n){t.setUTCDate(t.getUTCDate()+7*n)},function(t,n){return(n-t)/L_})}function Nu(t){if(0<=t.y&&t.y<100){var n=new Date(-1,t.m,t.d,t.H,t.M,t.S,t.L);return n.setFullYear(t.y),n}return new Date(t.y,t.m,t.d,t.H,t.M,t.S,t.L)}function Su(t){if(0<=t.y&&t.y<100){var n=new Date(Date.UTC(-1,t.m,t.d,t.H,t.M,t.S,t.L));return n.setUTCFullYear(t.y),n}return new Date(Date.UTC(t.y,t.m,t.d,t.H,t.M,t.S,t.L))}function Eu(t){return{y:t,m:0,d:1,H:0,M:0,S:0,L:0}}function Au(t){function n(t,n){return function(e){var r,i,o,u=[],a=-1,c=0,s=t.length;for(e instanceof Date||(e=new Date(+e));++a=c)return-1;if(37===(i=n.charCodeAt(u++))){if(i=n.charAt(u++),!(o=T[i in Py?n.charAt(u++):i])||(r=o(t,e,r))<0)return-1}else 
if(i!=e.charCodeAt(r++))return-1}return r}var i=t.dateTime,o=t.date,u=t.time,a=t.periods,c=t.days,s=t.shortDays,f=t.months,l=t.shortMonths,h=Pu(a),p=Ru(a),d=Pu(c),v=Ru(c),_=Pu(s),y=Ru(s),g=Pu(f),m=Ru(f),x=Pu(l),b=Ru(l),w={a:function(t){return s[t.getDay()]},A:function(t){return c[t.getDay()]},b:function(t){return l[t.getMonth()]},B:function(t){return f[t.getMonth()]},c:null,d:Wu,e:Wu,H:Zu,I:Gu,j:Ju,L:Qu,m:Ku,M:ta,p:function(t){return a[+(t.getHours()>=12)]},S:na,U:ea,w:ra,W:ia,x:null,X:null,y:oa,Y:ua,Z:aa,"%":wa},M={a:function(t){return s[t.getUTCDay()]},A:function(t){return c[t.getUTCDay()]},b:function(t){return l[t.getUTCMonth()]},B:function(t){return f[t.getUTCMonth()]},c:null,d:ca,e:ca,H:sa,I:fa,j:la,L:ha,m:pa,M:da,p:function(t){return a[+(t.getUTCHours()>=12)]},S:va,U:_a,w:ya,W:ga,x:null,X:null,y:ma,Y:xa,Z:ba,"%":wa},T={a:function(t,n,e){var r=_.exec(n.slice(e));return r?(t.w=y[r[0].toLowerCase()],e+r[0].length):-1},A:function(t,n,e){var r=d.exec(n.slice(e));return r?(t.w=v[r[0].toLowerCase()],e+r[0].length):-1},b:function(t,n,e){var r=x.exec(n.slice(e));return r?(t.m=b[r[0].toLowerCase()],e+r[0].length):-1},B:function(t,n,e){var r=g.exec(n.slice(e));return r?(t.m=m[r[0].toLowerCase()],e+r[0].length):-1},c:function(t,n,e){return r(t,i,n,e)},d:Yu,e:Yu,H:ju,I:ju,j:Bu,L:$u,m:Iu,M:Hu,p:function(t,n,e){var r=h.exec(n.slice(e));return r?(t.p=p[r[0].toLowerCase()],e+r[0].length):-1},S:Xu,U:qu,w:Lu,W:Uu,x:function(t,n,e){return r(t,o,n,e)},X:function(t,n,e){return r(t,u,n,e)},y:Ou,Y:Du,Z:Fu,"%":Vu};return w.x=n(o,w),w.X=n(u,w),w.c=n(i,w),M.x=n(o,M),M.X=n(u,M),M.c=n(i,M),{format:function(t){var e=n(t+="",w);return e.toString=function(){return t},e},parse:function(t){var n=e(t+="",Nu);return n.toString=function(){return t},n},utcFormat:function(t){var e=n(t+="",M);return e.toString=function(){return t},e},utcParse:function(t){var n=e(t,Su);return n.toString=function(){return t},n}}}function Cu(t,n,e){var r=t<0?"-":"",i=(r?-t:t)+"",o=i.length;return 
r+(o68?1900:2e3),e+r[0].length):-1}function Fu(t,n,e){var r=/^(Z)|([+-]\d\d)(?:\:?(\d\d))?/.exec(n.slice(e,e+6));return r?(t.Z=r[1]?0:-(r[2]+(r[3]||"00")),e+r[0].length):-1}function Iu(t,n,e){var r=Ry.exec(n.slice(e,e+2));return r?(t.m=r[0]-1,e+r[0].length):-1}function Yu(t,n,e){var r=Ry.exec(n.slice(e,e+2));return r?(t.d=+r[0],e+r[0].length):-1}function Bu(t,n,e){var r=Ry.exec(n.slice(e,e+3));return r?(t.m=0,t.d=+r[0],e+r[0].length):-1}function ju(t,n,e){var r=Ry.exec(n.slice(e,e+2));return r?(t.H=+r[0],e+r[0].length):-1}function Hu(t,n,e){var r=Ry.exec(n.slice(e,e+2));return r?(t.M=+r[0],e+r[0].length):-1}function Xu(t,n,e){var r=Ry.exec(n.slice(e,e+2));return r?(t.S=+r[0],e+r[0].length):-1}function $u(t,n,e){var r=Ry.exec(n.slice(e,e+3));return r?(t.L=+r[0],e+r[0].length):-1}function Vu(t,n,e){var r=Ly.exec(n.slice(e,e+1));return r?e+r[0].length:-1}function Wu(t,n){return Cu(t.getDate(),n,2)}function Zu(t,n){return Cu(t.getHours(),n,2)}function Gu(t,n){return Cu(t.getHours()%12||12,n,2)}function Ju(t,n){return Cu(1+Y_.count(oy(t),t),n,3)}function Qu(t,n){return Cu(t.getMilliseconds(),n,3)}function Ku(t,n){return Cu(t.getMonth()+1,n,2)}function ta(t,n){return Cu(t.getMinutes(),n,2)}function na(t,n){return Cu(t.getSeconds(),n,2)}function ea(t,n){return Cu(j_.count(oy(t),t),n,2)}function ra(t){return t.getDay()}function ia(t,n){return Cu(H_.count(oy(t),t),n,2)}function oa(t,n){return Cu(t.getFullYear()%100,n,2)}function ua(t,n){return Cu(t.getFullYear()%1e4,n,4)}function aa(t){var n=t.getTimezoneOffset();return(n>0?"-":(n*=-1,"+"))+Cu(n/60|0,"0",2)+Cu(n%60,"0",2)}function ca(t,n){return Cu(t.getUTCDate(),n,2)}function sa(t,n){return Cu(t.getUTCHours(),n,2)}function fa(t,n){return Cu(t.getUTCHours()%12||12,n,2)}function la(t,n){return Cu(1+ly.count(Ay(t),t),n,3)}function ha(t,n){return Cu(t.getUTCMilliseconds(),n,3)}function pa(t,n){return Cu(t.getUTCMonth()+1,n,2)}function da(t,n){return Cu(t.getUTCMinutes(),n,2)}function va(t,n){return 
Cu(t.getUTCSeconds(),n,2)}function _a(t,n){return Cu(py.count(Ay(t),t),n,2)}function ya(t){return t.getUTCDay()}function ga(t,n){return Cu(dy.count(Ay(t),t),n,2)}function ma(t,n){return Cu(t.getUTCFullYear()%100,n,2)}function xa(t,n){return Cu(t.getUTCFullYear()%1e4,n,4)}function ba(){return"+0000"}function wa(){return"%"}function Ma(n){return Cy=Au(n),t.timeFormat=Cy.format,t.timeParse=Cy.parse,t.utcFormat=Cy.utcFormat,t.utcParse=Cy.utcParse,Cy}function Ta(t){return new Date(t)}function ka(t){return t instanceof Date?+t:+new Date(+t)}function Na(t,n,e,r,o,u,a,c,s){function f(i){return(a(i)1?0:t<-1?pg:Math.acos(t)}function Ca(t){return t>=1?dg:t<=-1?-dg:Math.asin(t)}function za(t){return t.innerRadius}function Pa(t){return t.outerRadius}function Ra(t){return t.startAngle}function La(t){return t.endAngle}function qa(t){return t&&t.padAngle}function Ua(t,n,e,r,i,o,u,a){var c=e-t,s=r-n,f=u-i,l=a-o,h=(f*(n-o)-l*(t-i))/(l*c-f*s);return[t+h*c,n+h*s]}function Da(t,n,e,r,i,o,u){var a=t-e,c=n-r,s=(u?o:-o)/lg(a*a+c*c),f=s*c,l=-s*a,h=t+f,p=n+l,d=e+f,v=r+l,_=(h+d)/2,y=(p+v)/2,g=d-h,m=v-p,x=g*g+m*m,b=i-o,w=h*v-d*p,M=(m<0?-1:1)*lg(cg(0,b*b*x-w*w)),T=(w*m-g*M)/x,k=(-w*g-m*M)/x,N=(w*m+g*M)/x,S=(-w*g+m*M)/x,E=T-_,A=k-y,C=N-_,z=S-y;return E*E+A*A>C*C+z*z&&(T=N,k=S),{cx:T,cy:k,x01:-f,y01:-l,x11:T*(i/b-1),y11:k*(i/b-1)}}function Oa(t){this._context=t}function Fa(t){return t[0]}function Ia(t){return t[1]}function Ya(t){this._curve=t}function Ba(t){function n(n){return new Ya(t(n))}return n._curve=t,n}function ja(t){var n=t.curve;return t.angle=t.x,delete t.x,t.radius=t.y,delete t.y,t.curve=function(t){return arguments.length?n(Ba(t)):n()._curve},t}function Ha(t){return t.source}function Xa(t){return t.target}function $a(t){function n(){var n,a=kg.call(arguments),c=e.apply(this,a),s=r.apply(this,a);if(u||(u=n=ve()),t(u,+i.apply(this,(a[0]=c,a)),+o.apply(this,a),+i.apply(this,(a[0]=s,a)),+o.apply(this,a)),n)return u=null,n+""||null}var e=Ha,r=Xa,i=Fa,o=Ia,u=null;return 
n.source=function(t){return arguments.length?(e=t,n):e},n.target=function(t){return arguments.length?(r=t,n):r},n.x=function(t){return arguments.length?(i="function"==typeof t?t:ig(+t),n):i},n.y=function(t){return arguments.length?(o="function"==typeof t?t:ig(+t),n):o},n.context=function(t){return arguments.length?(u=null==t?null:t,n):u},n}function Va(t,n,e,r,i){t.moveTo(n,e),t.bezierCurveTo(n=(n+r)/2,e,n,i,r,i)}function Wa(t,n,e,r,i){t.moveTo(n,e),t.bezierCurveTo(n,e=(e+i)/2,r,e,r,i)}function Za(t,n,e,r,i){var o=Tg(n,e),u=Tg(n,e=(e+i)/2),a=Tg(r,e),c=Tg(r,i);t.moveTo(o[0],o[1]),t.bezierCurveTo(u[0],u[1],a[0],a[1],c[0],c[1])}function Ga(t,n,e){t._context.bezierCurveTo((2*t._x0+t._x1)/3,(2*t._y0+t._y1)/3,(t._x0+2*t._x1)/3,(t._y0+2*t._y1)/3,(t._x0+4*t._x1+n)/6,(t._y0+4*t._y1+e)/6)}function Ja(t){this._context=t}function Qa(t){this._context=t}function Ka(t){this._context=t}function tc(t,n){this._basis=new Ja(t),this._beta=n}function nc(t,n,e){t._context.bezierCurveTo(t._x1+t._k*(t._x2-t._x0),t._y1+t._k*(t._y2-t._y0),t._x2+t._k*(t._x1-n),t._y2+t._k*(t._y1-e),t._x2,t._y2)}function ec(t,n){this._context=t,this._k=(1-n)/6}function rc(t,n){this._context=t,this._k=(1-n)/6}function ic(t,n){this._context=t,this._k=(1-n)/6}function oc(t,n,e){var r=t._x1,i=t._y1,o=t._x2,u=t._y2;if(t._l01_a>hg){var a=2*t._l01_2a+3*t._l01_a*t._l12_a+t._l12_2a,c=3*t._l01_a*(t._l01_a+t._l12_a);r=(r*a-t._x0*t._l12_2a+t._x2*t._l01_2a)/c,i=(i*a-t._y0*t._l12_2a+t._y2*t._l01_2a)/c}if(t._l23_a>hg){var s=2*t._l23_2a+3*t._l23_a*t._l12_a+t._l12_2a,f=3*t._l23_a*(t._l23_a+t._l12_a);o=(o*s+t._x1*t._l23_2a-n*t._l12_2a)/f,u=(u*s+t._y1*t._l23_2a-e*t._l12_2a)/f}t._context.bezierCurveTo(r,i,o,u,t._x2,t._y2)}function uc(t,n){this._context=t,this._alpha=n}function ac(t,n){this._context=t,this._alpha=n}function cc(t,n){this._context=t,this._alpha=n}function sc(t){this._context=t}function fc(t){return t<0?-1:1}function lc(t,n,e){var 
r=t._x1-t._x0,i=n-t._x1,o=(t._y1-t._y0)/(r||i<0&&-0),u=(e-t._y1)/(i||r<0&&-0),a=(o*i+u*r)/(r+i);return(fc(o)+fc(u))*Math.min(Math.abs(o),Math.abs(u),.5*Math.abs(a))||0}function hc(t,n){var e=t._x1-t._x0;return e?(3*(t._y1-t._y0)/e-n)/2:n}function pc(t,n,e){var r=t._x0,i=t._y0,o=t._x1,u=t._y1,a=(o-r)/3;t._context.bezierCurveTo(r+a,i+a*n,o-a,u-a*e,o,u)}function dc(t){this._context=t}function vc(t){this._context=new _c(t)}function _c(t){this._context=t}function yc(t){this._context=t}function gc(t){var n,e,r=t.length-1,i=new Array(r),o=new Array(r),u=new Array(r);for(i[0]=0,o[0]=2,u[0]=t[0]+2*t[1],n=1;n=0;--n)i[n]=(u[n]-i[n+1])/o[n];for(o[r-1]=(t[r]+i[r-1])/2,n=0;n0)){if(o/=h,h<0){if(o0){if(o>l)return;o>f&&(f=o)}if(o=r-c,h||!(o<0)){if(o/=h,h<0){if(o>l)return;o>f&&(f=o)}else if(h>0){if(o0)){if(o/=p,p<0){if(o0){if(o>l)return;o>f&&(f=o)}if(o=i-s,p||!(o<0)){if(o/=p,p<0){if(o>l)return;o>f&&(f=o)}else if(p>0){if(o0||l<1)||(f>0&&(t[0]=[c+f*h,s+f*p]),l<1&&(t[1]=[c+l*h,s+l*p]),!0)}}}}}function Rc(t,n,e,r,i){var o=t[1];if(o)return!0;var u,a,c=t[0],s=t.left,f=t.right,l=s[0],h=s[1],p=f[0],d=f[1],v=(l+p)/2,_=(h+d)/2;if(d===h){if(v=r)return;if(l>p){if(c){if(c[1]>=i)return}else c=[v,e];o=[v,i]}else{if(c){if(c[1]1)if(l>p){if(c){if(c[1]>=i)return}else c=[(e-a)/u,e];o=[(i-a)/u,i]}else{if(c){if(c[1]=r)return}else c=[n,u*n+a];o=[r,u*r+a]}else{if(c){if(c[0]sm||Math.abs(i[0][1]-i[1][1])>sm)||delete um[o]}function qc(t){return im[t.index]={site:t,halfedges:[]}}function Uc(t,n){var e=t.site,r=n.left,i=n.right;return e===i&&(i=r,r=e),i?Math.atan2(i[1]-r[1],i[0]-r[0]):(e===r?(r=n[1],i=n[0]):(r=n[0],i=n[1]),Math.atan2(r[0]-i[0],i[1]-r[1]))}function Dc(t,n){return n[+(n.left!==t.site)]}function Oc(t,n){return n[+(n.left===t.site)]}function Fc(){for(var t,n,e,r,i=0,o=im.length;ism||Math.abs(v-h)>sm)&&(c.splice(a,0,um.push(Cc(u,p,Math.abs(d-t)sm?[t,Math.abs(l-t)sm?[Math.abs(h-r)sm?[e,Math.abs(l-e)sm?[Math.abs(h-n)=-fm)){var p=c*c+s*s,d=f*f+l*l,v=(l*p-s*d)/h,_=(c*d-f*p)/h,y=am.pop()||new 
Yc;y.arc=t,y.site=i,y.x=v+u,y.y=(y.cy=_+a)+Math.sqrt(v*v+_*_),t.circle=y;for(var g=null,m=om._;m;)if(y.ysm)a=a.L;else{if(!((i=o-Gc(a,u))>sm)){r>-sm?(n=a.P,e=a):i>-sm?(n=a,e=a.N):n=e=a;break}if(!a.R){n=a;break}a=a.R}qc(t);var c=Xc(t);if(rm.insert(n,c),n||e){if(n===e)return jc(n),e=Xc(n.site),rm.insert(c,e),c.edge=e.edge=Ac(n.site,c.site),Bc(n),void Bc(e);if(e){jc(n),jc(e);var s=n.site,f=s[0],l=s[1],h=t[0]-f,p=t[1]-l,d=e.site,v=d[0]-f,_=d[1]-l,y=2*(h*_-p*v),g=h*h+p*p,m=v*v+_*_,x=[(_*g-p*m)/y+f,(h*m-v*g)/y+l];zc(e.edge,s,d,x),c.edge=Ac(s,t,null,x),e.edge=Ac(t,d,null,x),Bc(n),Bc(e)}else c.edge=Ac(n.site,c.site)}}function Zc(t,n){var e=t.site,r=e[0],i=e[1],o=i-n;if(!o)return r;var u=t.P;if(!u)return-1/0;var a=(e=u.site)[0],c=e[1],s=c-n;if(!s)return a;var f=a-r,l=1/o-1/s,h=f/s;return l?(-h+Math.sqrt(h*h-2*l*(f*f/(-2*s)-c+s/2+i-o/2)))/l+r:(r+a)/2}function Gc(t,n){var e=t.N;if(e)return Zc(e,n);var r=t.site;return r[1]===n?r[0]:1/0}function Jc(t,n,e){return(t[0]-e[0])*(n[1]-t[1])-(t[0]-n[0])*(e[1]-t[1])}function Qc(t,n){return n[1]-t[1]||n[0]-t[0]}function Kc(t,n){var e,r,i,o=t.sort(Qc).pop();for(um=[],im=new Array(t.length),rm=new Tc,om=new Tc;;)if(i=em,o&&(!i||o[1]n?1:t>=n?0:NaN},fs=function(t){return 1===t.length&&(t=n(t)),{left:function(n,e,r,i){for(null==r&&(r=0),null==i&&(i=n.length);r>>1;t(n[o],e)<0?r=o+1:i=o}return r},right:function(n,e,r,i){for(null==r&&(r=0),null==i&&(i=n.length);r>>1;t(n[o],e)>0?i=o:r=o+1}return r}}},ls=fs(ss),hs=ls.right,ps=ls.left,ds=function(t){return null===t?NaN:+t},vs=function(t,n){var e,r,i=t.length,o=0,u=-1,a=0,c=0;if(null==n)for(;++u1)return c/(o-1)},_s=function(t,n){var e=vs(t,n);return e?Math.sqrt(e):e},ys=function(t,n){var e,r,i,o=t.length,u=-1;if(null==n){for(;++u=e)for(r=i=e;++ue&&(r=e),i=e)for(r=i=e;++ue&&(r=e),i0)for(t=Math.ceil(t/u),n=Math.floor(n/u),o=new Array(i=Math.ceil(n-t+1));++c=1)return+e(t[r-1],r-1,t);var r,i=(r-1)*n,o=Math.floor(i),u=+e(t[o],o,t);return u+(+e(t[o+1],o+1,t)-u)*(i-o)}},Cs=function(t){for(var 
n,e,r,i=t.length,o=-1,u=0;++o=0;)for(n=(r=t[i]).length;--n>=0;)e[--u]=r[n];return e},zs=function(t,n){var e,r,i=t.length,o=-1;if(null==n){for(;++o=e)for(r=e;++oe&&(r=e)}else for(;++o=e)for(r=e;++oe&&(r=e);return r},Ps=function(t){if(!(i=t.length))return[];for(var n=-1,e=zs(t,o),r=new Array(e);++n0)for(var e,r,i=new Array(e),o=0;o=0&&"xmlns"!==(n=t.slice(0,e))&&(t=t.slice(e+1)),Bs.hasOwnProperty(n)?{space:Bs[n],local:t}:t},Hs=function(t){var n=js(t);return(n.local?g:y)(n)},Xs=0;x.prototype=m.prototype={constructor:x,get:function(t){for(var n=this._;!(n in t);)if(!(t=t.parentNode))return;return t[n]},set:function(t,n){return t[this._]=n},remove:function(t){return this._ in t&&delete t[this._]},toString:function(){return this._}};var $s=function(t){return function(){return this.matches(t)}};if("undefined"!=typeof document){var Vs=document.documentElement;if(!Vs.matches){var Ws=Vs.webkitMatchesSelector||Vs.msMatchesSelector||Vs.mozMatchesSelector||Vs.oMatchesSelector;$s=function(t){return function(){return Ws.call(this,t)}}}}var Zs=$s,Gs={};t.event=null,"undefined"!=typeof document&&("onmouseenter"in document.documentElement||(Gs={mouseenter:"mouseover",mouseleave:"mouseout"}));var Js=function(){for(var n,e=t.event;n=e.sourceEvent;)e=n;return e},Qs=function(t,n){var e=t.ownerSVGElement||t;if(e.createSVGPoint){var r=e.createSVGPoint();return r.x=n.clientX,r.y=n.clientY,r=r.matrixTransform(t.getScreenCTM().inverse()),[r.x,r.y]}var i=t.getBoundingClientRect();return[n.clientX-i.left-t.clientLeft,n.clientY-i.top-t.clientTop]},Ks=function(t){var n=Js();return n.changedTouches&&(n=n.changedTouches[0]),Qs(t,n)},tf=function(t){return null==t?S:function(){return this.querySelector(t)}},nf=function(t){return null==t?E:function(){return this.querySelectorAll(t)}},ef=function(t){return new Array(t.length)};A.prototype={constructor:A,appendChild:function(t){return this._parent.insertBefore(t,this._next)},insertBefore:function(t,n){return 
this._parent.insertBefore(t,n)},querySelector:function(t){return this._parent.querySelector(t)},querySelectorAll:function(t){return this._parent.querySelectorAll(t)}};var rf=function(t){return function(){return t}},of="$",uf=function(t){return t.ownerDocument&&t.ownerDocument.defaultView||t.document&&t||t.defaultView};W.prototype={add:function(t){this._names.indexOf(t)<0&&(this._names.push(t),this._node.setAttribute("class",this._names.join(" ")))},remove:function(t){var n=this._names.indexOf(t);n>=0&&(this._names.splice(n,1),this._node.setAttribute("class",this._names.join(" ")))},contains:function(t){return this._names.indexOf(t)>=0}};var af=[null];pt.prototype=dt.prototype={constructor:pt,select:function(t){"function"!=typeof t&&(t=tf(t));for(var n=this._groups,e=n.length,r=new Array(e),i=0;i=x&&(x=m+1);!(g=_[x])&&++x=0;)(r=i[o])&&(u&&u!==r.nextSibling&&u.parentNode.insertBefore(r,u),u=r);return this},sort:function(t){t||(t=P);for(var n=this._groups,e=n.length,r=new Array(e),i=0;i1?this.each((null==n?F:"function"==typeof n?Y:I)(t,n,null==e?"":e)):B(this.node(),t)},property:function(t,n){return arguments.length>1?this.each((null==n?j:"function"==typeof n?X:H)(t,n)):this.node()[t]},classed:function(t,n){var e=$(t+"");if(arguments.length<2){for(var r=V(this.node()),i=-1,o=e.length;++i=240?t-240:t+120,i,r),Lt(t,i,r),Lt(t<120?t+240:t-120,i,r),this.opacity)},displayable:function(){return(0<=this.s&&this.s<=1||isNaN(this.s))&&0<=this.l&&this.l<=1&&0<=this.opacity&&this.opacity<=1}}));var Nf=Math.PI/180,Sf=180/Math.PI,Ef=.95047,Af=1,Cf=1.08883,zf=4/29,Pf=6/29,Rf=3*Pf*Pf,Lf=Pf*Pf*Pf;pf(Dt,Ut,wt(Mt,{brighter:function(t){return new Dt(this.l+18*(null==t?1:t),this.a,this.b,this.opacity)},darker:function(t){return new Dt(this.l-18*(null==t?1:t),this.a,this.b,this.opacity)},rgb:function(){var t=(this.l+16)/116,n=isNaN(this.a)?t:t+this.a/500,e=isNaN(this.b)?t:t-this.b/200;return t=Af*Ft(t),n=Ef*Ft(n),e=Cf*Ft(e),new 
At(It(3.2404542*n-1.5371385*t-.4985314*e),It(-.969266*n+1.8760108*t+.041556*e),It(.0556434*n-.2040259*t+1.0572252*e),this.opacity)}})),pf(Ht,jt,wt(Mt,{brighter:function(t){return new Ht(this.h,this.c,this.l+18*(null==t?1:t),this.opacity)},darker:function(t){return new Ht(this.h,this.c,this.l-18*(null==t?1:t),this.opacity)},rgb:function(){return qt(this).rgb()}}));var qf=-.14861,Uf=1.78277,Df=-.29227,Of=-.90649,Ff=1.97294,If=Ff*Of,Yf=Ff*Uf,Bf=Uf*Df-Of*qf;pf(Vt,$t,wt(Mt,{brighter:function(t){return t=null==t?1/.7:Math.pow(1/.7,t),new Vt(this.h,this.s,this.l*t,this.opacity)},darker:function(t){return t=null==t?.7:Math.pow(.7,t),new Vt(this.h,this.s,this.l*t,this.opacity)},rgb:function(){var t=isNaN(this.h)?0:(this.h+120)*Nf,n=+this.l,e=isNaN(this.s)?0:this.s*n*(1-n),r=Math.cos(t),i=Math.sin(t);return new At(255*(n+e*(qf*r+Uf*i)),255*(n+e*(Df*r+Of*i)),255*(n+e*(Ff*r)),this.opacity)}}));var jf,Hf,Xf,$f,Vf,Wf,Zf=function(t){var n=t.length-1;return function(e){var r=e<=0?e=0:e>=1?(e=1,n-1):Math.floor(e*n),i=t[r],o=t[r+1],u=r>0?t[r-1]:2*i-o,a=ro&&(i=n.slice(o,i),a[u]?a[u]+=i:a[++u]=i),(e=e[0])===(r=r[0])?a[u]?a[u]+=r:a[++u]=r:(a[++u]=null,c.push({i:u,x:rl(e,r)})),o=ul.lastIndex;return oDl&&e.state1e-6)if(Math.abs(f*a-c*s)>1e-6&&i){var h=e-o,p=r-u,d=a*a+c*c,v=h*h+p*p,_=Math.sqrt(d),y=Math.sqrt(l),g=i*Math.tan((Yh-Math.acos((d+l-v)/(2*_*y)))/2),m=g/y,x=g/_;Math.abs(m-1)>1e-6&&(this._+="L"+(t+m*s)+","+(n+m*f)),this._+="A"+i+","+i+",0,0,"+ +(f*h>s*p)+","+(this._x1=t+x*a)+","+(this._y1=n+x*c)}else this._+="L"+(this._x1=t)+","+(this._y1=n);else;},arc:function(t,n,e,r,i,o){t=+t,n=+n;var u=(e=+e)*Math.cos(r),a=e*Math.sin(r),c=t+u,s=n+a,f=1^o,l=o?r-i:i-r;if(e<0)throw new Error("negative radius: "+e);null===this._x1?this._+="M"+c+","+s:(Math.abs(this._x1-c)>1e-6||Math.abs(this._y1-s)>1e-6)&&(this._+="L"+c+","+s),e&&(l<0&&(l=l%Bh+Bh),l>jh?this._+="A"+e+","+e+",0,1,"+f+","+(t-u)+","+(n-a)+"A"+e+","+e+",0,1,"+f+","+(this._x1=c)+","+(this._y1=s):l>1e-6&&(this._+="A"+e+","+e+",0,"+ 
+(l>=Yh)+","+f+","+(this._x1=t+e*Math.cos(i))+","+(this._y1=n+e*Math.sin(i))))},rect:function(t,n,e,r){this._+="M"+(this._x0=this._x1=+t)+","+(this._y0=this._y1=+n)+"h"+ +e+"v"+ +r+"h"+-e+"Z"},toString:function(){return this._}};be.prototype=we.prototype={constructor:be,has:function(t){return"$"+t in this},get:function(t){return this["$"+t]},set:function(t,n){return this["$"+t]=n,this},remove:function(t){var n="$"+t;return n in this&&delete this[n]},clear:function(){for(var t in this)"$"===t[0]&&delete this[t]},keys:function(){var t=[];for(var n in this)"$"===n[0]&&t.push(n.slice(1));return t},values:function(){var t=[];for(var n in this)"$"===n[0]&&t.push(this[n]);return t},entries:function(){var t=[];for(var n in this)"$"===n[0]&&t.push({key:n.slice(1),value:this[n]});return t},size:function(){var t=0;for(var n in this)"$"===n[0]&&++t;return t},empty:function(){for(var t in this)if("$"===t[0])return!1;return!0},each:function(t){for(var n in this)"$"===n[0]&&t(this[n],n.slice(1),this)}};var Hh=we.prototype;Se.prototype=Ee.prototype={constructor:Se,has:Hh.has,add:function(t){return t+="",this["$"+t]=t,this},remove:Hh.remove,clear:Hh.clear,values:Hh.keys,size:Hh.size,empty:Hh.empty,each:Hh.each};var Xh=function(t){function n(t,n){function e(){if(f>=s)return a;if(i)return i=!1,u;var n,e=f;if(34===t.charCodeAt(e)){for(var r=e;r++f&&(f=r),il&&(l=i));for(ft||t>i||r>n||n>o))return this;var u,a,c=i-e,s=this._root;switch(a=(n<(r+o)/2)<<1|t<(e+i)/2){case 0:do{u=new Array(4),u[a]=s,s=u}while(c*=2,i=e+c,o=r+c,t>i||n>o);break;case 1:do{u=new Array(4),u[a]=s,s=u}while(c*=2,e=i-c,o=r+c,e>t||n>o);break;case 2:do{u=new Array(4),u[a]=s,s=u}while(c*=2,i=e+c,r=o-c,t>i||r>n);break;case 3:do{u=new Array(4),u[a]=s,s=u}while(c*=2,e=i-c,r=o-c,e>t||r>n)}this._root&&this._root.length&&(this._root=s)}return this._x0=e,this._y0=r,this._x1=i,this._y1=o,this},op.data=function(){var t=[];return 
this.visit(function(n){if(!n.length)do{t.push(n.data)}while(n=n.next)}),t},op.extent=function(t){return arguments.length?this.cover(+t[0][0],+t[0][1]).cover(+t[1][0],+t[1][1]):isNaN(this._x0)?void 0:[[this._x0,this._y0],[this._x1,this._y1]]},op.find=function(t,n,e){var r,i,o,u,a,c,s,f=this._x0,l=this._y0,h=this._x1,p=this._y1,d=[],v=this._root;for(v&&d.push(new ip(v,f,l,h,p)),null==e?e=1/0:(f=t-e,l=n-e,h=t+e,p=n+e,e*=e);c=d.pop();)if(!(!(v=c.node)||(i=c.x0)>h||(o=c.y0)>p||(u=c.x1)=y)<<1|t>=_)&&(c=d[d.length-1],d[d.length-1]=d[d.length-1-s],d[d.length-1-s]=c)}else{var g=t-+this._x.call(null,v.data),m=n-+this._y.call(null,v.data),x=g*g+m*m;if(x=(a=(d+_)/2))?d=a:_=a,(f=u>=(c=(v+y)/2))?v=c:y=c,n=p,!(p=p[l=f<<1|s]))return this;if(!p.length)break;(n[l+1&3]||n[l+2&3]||n[l+3&3])&&(e=n,h=l)}for(;p.data!==t;)if(r=p,!(p=p.next))return this;return(i=p.next)&&delete p.next,r?(i?r.next=i:delete r.next,this):n?(i?n[l]=i:delete n[l],(p=n[0]||n[1]||n[2]||n[3])&&p===(n[3]||n[2]||n[1]||n[0])&&!p.length&&(e?e[h]=p:this._root=p),this):(this._root=i,this)},op.removeAll=function(t){for(var n=0,e=t.length;n1?r[0]+r.slice(2):r,+t.slice(e+1)]},fp=function(t){return(t=sp(Math.abs(t)))?t[1]:NaN},lp=function(t,n){return function(e,r){for(var i=e.length,o=[],u=0,a=t[0],c=0;i>0&&a>0&&(c+a+1>r&&(a=Math.max(1,r-c)),o.push(e.substring(i-=a,i+a)),!((c+=a+1)>r));)a=t[u=(u+1)%t.length];return o.reverse().join(n)}},hp=function(t){return function(n){return n.replace(/[0-9]/g,function(n){return t[+n]})}},pp=function(t,n){var e=sp(t,n);if(!e)return t+"";var r=e[0],i=e[1];return i<0?"0."+new Array(-i).join("0")+r:r.length>i+1?r.slice(0,i+1)+"."+r.slice(i+1):r+new Array(i-r.length+2).join("0")},dp={"":function(t,n){t:for(var e,r=(t=t.toPrecision(n)).length,i=1,o=-1;i0&&(o=0)}return o>0?t.slice(0,o)+t.slice(e+1):t},"%":function(t,n){return(100*t).toFixed(n)},b:function(t){return Math.round(t).toString(2)},c:function(t){return t+""},d:function(t){return Math.round(t).toString(10)},e:function(t,n){return 
t.toExponential(n)},f:function(t,n){return t.toFixed(n)},g:function(t,n){return t.toPrecision(n)},o:function(t){return Math.round(t).toString(8)},p:function(t,n){return pp(100*t,n)},r:pp,s:function(t,n){var e=sp(t,n);if(!e)return t+"";var r=e[0],i=e[1],o=i-(up=3*Math.max(-8,Math.min(8,Math.floor(i/3))))+1,u=r.length;return o===u?r:o>u?r+new Array(o-u+1).join("0"):o>0?r.slice(0,o)+"."+r.slice(o):"0."+new Array(1-o).join("0")+sp(t,Math.max(0,n+o-1))[0]},X:function(t){return Math.round(t).toString(16).toUpperCase()},x:function(t){return Math.round(t).toString(16)}},vp=/^(?:(.)?([<>=^]))?([+\-\( ])?([$#])?(0)?(\d+)?(,)?(\.\d+)?([a-z%])?$/i;He.prototype=Xe.prototype,Xe.prototype.toString=function(){return this.fill+this.align+this.sign+this.symbol+(this.zero?"0":"")+(null==this.width?"":Math.max(1,0|this.width))+(this.comma?",":"")+(null==this.precision?"":"."+Math.max(0,0|this.precision))+this.type};var _p,yp=function(t){return t},gp=["y","z","a","f","p","n","µ","m","","k","M","G","T","P","E","Z","Y"],mp=function(t){function n(t){function n(t){var n,r,u,f=_,x=y;if("c"===v)x=g(t)+x,t="";else{var b=(t=+t)<0;if(t=g(Math.abs(t),d),b&&0==+t&&(b=!1),f=(b?"("===s?s:"-":"-"===s||"("===s?"":s)+f,x=x+("s"===v?gp[8+up/3]:"")+(b&&"("===s?")":""),m)for(n=-1,r=t.length;++n(u=t.charCodeAt(n))||u>57){x=(46===u?i+t.slice(n+1):t.slice(n))+x,t=t.slice(0,n);break}}p&&!l&&(t=e(t,1/0));var w=f.length+t.length+x.length,M=w>1)+f+t+x+M.slice(w);break;default:t=M+f+t+x}return o(t)}var a=(t=He(t)).fill,c=t.align,s=t.sign,f=t.symbol,l=t.zero,h=t.width,p=t.comma,d=t.precision,v=t.type,_="$"===f?r[0]:"#"===f&&/[boxX]/.test(v)?"0"+v.toLowerCase():"",y="$"===f?r[1]:/[%p]/.test(v)?u:"",g=dp[v],m=!v||/[defgprs%]/.test(v);return d=null==d?v?6:12:/[gprs]/.test(v)?Math.max(1,Math.min(21,d)):Math.max(0,Math.min(20,d)),n.toString=function(){return t+""},n}var 
e=t.grouping&&t.thousands?lp(t.grouping,t.thousands):yp,r=t.currency,i=t.decimal,o=t.numerals?hp(t.numerals):yp,u=t.percent||"%";return{format:n,formatPrefix:function(t,e){var r=n((t=He(t),t.type="f",t)),i=3*Math.max(-8,Math.min(8,Math.floor(fp(e)/3))),o=Math.pow(10,-i),u=gp[8+i/3];return function(t){return r(o*t)+u}}}};$e({decimal:".",thousands:",",grouping:[3],currency:["$",""]});var xp=function(t){return Math.max(0,-fp(Math.abs(t)))},bp=function(t,n){return Math.max(0,3*Math.max(-8,Math.min(8,Math.floor(fp(n)/3)))-fp(Math.abs(t)))},wp=function(t,n){return t=Math.abs(t),n=Math.abs(n)-t,Math.max(0,fp(n)-fp(t))+1},Mp=function(){return new Ve};Ve.prototype={constructor:Ve,reset:function(){this.s=this.t=0},add:function(t){We(nd,t,this.t),We(this,nd.s,this.s),this.s?this.t+=nd.t:this.s=nd.t},valueOf:function(){return this.s}};var Tp,kp,Np,Sp,Ep,Ap,Cp,zp,Pp,Rp,Lp,qp,Up,Dp,Op,Fp,Ip,Yp,Bp,jp,Hp,Xp,$p,Vp,Wp,Zp,Gp,Jp,Qp,Kp,td,nd=new Ve,ed=1e-6,rd=Math.PI,id=rd/2,od=rd/4,ud=2*rd,ad=180/rd,cd=rd/180,sd=Math.abs,fd=Math.atan,ld=Math.atan2,hd=Math.cos,pd=Math.ceil,dd=Math.exp,vd=Math.log,_d=Math.pow,yd=Math.sin,gd=Math.sign||function(t){return t>0?1:t<0?-1:0},md=Math.sqrt,xd=Math.tan,bd={Feature:function(t,n){Ke(t.geometry,n)},FeatureCollection:function(t,n){for(var e=t.features,r=-1,i=e.length;++red?Pp=90:Sd<-ed&&(Cp=-90),Op[0]=Ap,Op[1]=zp}},Ad={sphere:Qe,point:Mr,lineStart:kr,lineEnd:Er,polygonStart:function(){Ad.lineStart=Ar,Ad.lineEnd=Cr},polygonEnd:function(){Ad.lineStart=kr,Ad.lineEnd=Er}},Cd=function(t){return function(){return t}},zd=function(t,n){function e(e,r){return e=t(e,r),n(e[0],e[1])}return t.invert&&n.invert&&(e.invert=function(e,r){return(e=n.invert(e,r))&&t.invert(e[0],e[1])}),e};Rr.invert=Rr;var Pd,Rd,Ld,qd,Ud,Dd,Od,Fd,Id,Yd,Bd,jd=function(t){function n(n){return n=t(n[0]*cd,n[1]*cd),n[0]*=ad,n[1]*=ad,n}return t=Lr(t[0]*cd,t[1]*cd,t.length>2?t[2]*cd:0),n.invert=function(n){return n=t.invert(n[0]*cd,n[1]*cd),n[0]*=ad,n[1]*=ad,n},n},Hd=function(){var 
t,n=[];return{point:function(n,e){t.push([n,e])},lineStart:function(){n.push(t=[])},lineEnd:Qe,rejoin:function(){n.length>1&&n.push(n.pop().concat(n.shift()))},result:function(){var e=n;return n=[],t=null,e}}},Xd=function(t,n,e,r,i,o){var u,a=t[0],c=t[1],s=0,f=1,l=n[0]-a,h=n[1]-c;if(u=e-a,l||!(u>0)){if(u/=l,l<0){if(u0){if(u>f)return;u>s&&(s=u)}if(u=i-a,l||!(u<0)){if(u/=l,l<0){if(u>f)return;u>s&&(s=u)}else if(l>0){if(u0)){if(u/=h,h<0){if(u0){if(u>f)return;u>s&&(s=u)}if(u=o-c,h||!(u<0)){if(u/=h,h<0){if(u>f)return;u>s&&(s=u)}else if(h>0){if(u0&&(t[0]=a+s*l,t[1]=c+s*h),f<1&&(n[0]=a+f*l,n[1]=c+f*h),!0}}}}},$d=function(t,n){return sd(t[0]-n[0])=0;--o)i.point((f=s[o])[0],f[1]);else r(h.x,h.p.x,-1,i);h=h.p}s=(h=h.o).z,p=!p}while(!h.v);i.lineEnd()}}},Wd=1e9,Zd=-Wd,Gd=Mp(),Jd=function(t,n){var e=n[0],r=n[1],i=[yd(e),-hd(e),0],o=0,u=0;Gd.reset();for(var a=0,c=t.length;a=0?1:-1,T=M*w,k=T>rd,N=d*x;if(Gd.add(ld(N*M*yd(T),v*b+N*hd(T))),o+=k?w+M*ud:w,k^h>=e^g>=e){var S=sr(ar(l),ar(y));hr(S);var E=sr(i,S);hr(E);var A=(k^w>=0?-1:1)*Ge(E[2]);(r>A||r===A&&(S[0]||S[1]))&&(u+=k^w>=0?1:-1)}}return(o<-ed||ohv&&(hv=t),npv&&(pv=n)},lineStart:Qe,lineEnd:Qe,polygonStart:Qe,polygonEnd:Qe,result:function(){var t=[[fv,lv],[hv,pv]];return hv=pv=-(lv=fv=1/0),t}},vv=0,_v=0,yv=0,gv=0,mv=0,xv=0,bv=0,wv=0,Mv=0,Tv={point:oi,lineStart:ui,lineEnd:si,polygonStart:function(){Tv.lineStart=fi,Tv.lineEnd=li},polygonEnd:function(){Tv.point=oi,Tv.lineStart=ui,Tv.lineEnd=si},result:function(){var t=Mv?[bv/Mv,wv/Mv]:xv?[gv/xv,mv/xv]:yv?[vv/yv,_v/yv]:[NaN,NaN];return vv=_v=yv=gv=mv=xv=bv=wv=Mv=0,t}};di.prototype={_radius:4.5,pointRadius:function(t){return this._radius=t,this},polygonStart:function(){this._line=0},polygonEnd:function(){this._line=NaN},lineStart:function(){this._point=0},lineEnd:function(){0===this._line&&this._context.closePath(),this._point=NaN},point:function(t,n){switch(this._point){case 0:this._context.moveTo(t,n),this._point=1;break;case 
1:this._context.lineTo(t,n);break;default:this._context.moveTo(t+this._radius,n),this._context.arc(t,n,this._radius,0,ud)}},result:Qe};var kv,Nv,Sv,Ev,Av,Cv=Mp(),zv={point:Qe,lineStart:function(){zv.point=vi},lineEnd:function(){kv&&_i(Nv,Sv),zv.point=Qe},polygonStart:function(){kv=!0},polygonEnd:function(){kv=null},result:function(){var t=+Cv;return Cv.reset(),t}};yi.prototype={_radius:4.5,_circle:gi(4.5),pointRadius:function(t){return(t=+t)!==this._radius&&(this._radius=t,this._circle=null),this},polygonStart:function(){this._line=0},polygonEnd:function(){this._line=NaN},lineStart:function(){this._point=0},lineEnd:function(){0===this._line&&this._string.push("Z"),this._point=NaN},point:function(t,n){switch(this._point){case 0:this._string.push("M",t,",",n),this._point=1;break;case 1:this._string.push("L",t,",",n);break;default:null==this._circle&&(this._circle=gi(this._radius)),this._string.push("M",t,",",n,this._circle)}},result:function(){if(this._string.length){var t=this._string.join("");return this._string=[],t}return null}};var Pv=function(t,n,e,r){return function(i,o){function u(n,e){var r=i(n,e);t(n=r[0],e=r[1])&&o.point(n,e)}function a(t,n){var e=i(t,n);_.point(e[0],e[1])}function c(){b.point=a,_.lineStart()}function s(){b.point=u,_.lineEnd()}function f(t,n){v.push([t,n]);var e=i(t,n);m.point(e[0],e[1])}function l(){m.lineStart(),v=[]}function h(){f(v[0][0],v[0][1]),m.lineEnd();var t,n,e,r,i=m.clean(),u=g.result(),a=u.length;if(v.pop(),p.push(v),v=null,a)if(1&i){if(e=u[0],(n=e.length-1)>0){for(x||(o.polygonStart(),x=!0),o.lineStart(),t=0;t1&&2&i&&u.push(u.pop().concat(u.shift())),d.push(u.filter(mi))}var p,d,v,_=n(o),y=i.invert(r[0],r[1]),g=Hd(),m=n(g),x=!1,b={point:u,lineStart:c,lineEnd:s,polygonStart:function(){b.point=f,b.lineStart=l,b.lineEnd=h,d=[],p=[]},polygonEnd:function(){b.point=u,b.lineStart=c,b.lineEnd=s,d=Cs(d);var 
t=Jd(p,y);d.length?(x||(o.polygonStart(),x=!0),Vd(d,xi,t,e,o)):t&&(x||(o.polygonStart(),x=!0),o.lineStart(),e(null,null,1,o),o.lineEnd()),x&&(o.polygonEnd(),x=!1),d=p=null},sphere:function(){o.polygonStart(),o.lineStart(),e(null,null,1,o),o.lineEnd(),o.polygonEnd()}};return b}},Rv=Pv(function(){return!0},function(t){var n,e=NaN,r=NaN,i=NaN;return{lineStart:function(){t.lineStart(),n=1},point:function(o,u){var a=o>0?rd:-rd,c=sd(o-e);sd(c-rd)0?id:-id),t.point(i,r),t.lineEnd(),t.lineStart(),t.point(a,r),t.point(o,r),n=0):i!==a&&c>=rd&&(sd(e-i)ed){var o=t[0]o}function r(t,n,e){var r=[1,0,0],i=sr(ar(t),ar(n)),u=cr(i,i),a=i[0],c=u-a*a;if(!c)return!e&&t;var s=o*u/c,f=-o*a/c,l=sr(r,i),h=lr(r,s);fr(h,lr(i,f));var p=l,d=cr(h,p),v=cr(p,p),_=d*d-v*(cr(h,h)-1);if(!(_<0)){var y=md(_),g=lr(p,(-d-y)/v);if(fr(g,h),g=ur(g),!e)return g;var m,x=t[0],b=n[0],w=t[1],M=n[1];b0^g[1]<(sd(g[0]-x)rd^(x<=g[0]&&g[0]<=b)){var S=lr(p,(-d+y)/v);return fr(S,h),[g,ur(S)]}}}function i(n,e){var r=u?t:rd-t,i=0;return n<-r?i|=1:n>r&&(i|=2),e<-r?i|=4:e>r&&(i|=8),i}var o=hd(t),u=o>0,a=sd(o)>ed;return Pv(e,function(t){var n,o,c,s,f;return{lineStart:function(){s=c=!1,f=1},point:function(l,h){var p,d=[l,h],v=e(l,h),_=u?v?0:i(l,h):v?i(l+(l<0?rd:-rd),h):0;if(!n&&(s=c=v)&&t.lineStart(),v!==c&&(!(p=r(n,d))||$d(n,p)||$d(d,p))&&(d[0]+=ed,d[1]+=ed,v=e(d[0],d[1])),v!==c)f=0,v?(t.lineStart(),p=r(d,n),t.point(p[0],p[1])):(p=r(n,d),t.point(p[0],p[1]),t.lineEnd()),n=p;else if(a&&n&&u^v){var y;_&o||!(y=r(d,n,!0))||(f=0,u?(t.lineStart(),t.point(y[0][0],y[0][1]),t.point(y[1][0],y[1][1]),t.lineEnd()):(t.point(y[1][0],y[1][1]),t.lineEnd(),t.lineStart(),t.point(y[0][0],y[0][1])))}!v||n&&$d(n,d)||t.point(d[0],d[1]),n=d,c=v,o=_},lineEnd:function(){c&&t.lineEnd(),n=null},clean:function(){return 
f|(s&&c)<<1}}},function(e,r,i,o){Or(o,t,n,i,e,r)},u?[0,-t]:[-rd,t-rd])};Mi.prototype={constructor:Mi,point:function(t,n){this.stream.point(t,n)},sphere:function(){this.stream.sphere()},lineStart:function(){this.stream.lineStart()},lineEnd:function(){this.stream.lineEnd()},polygonStart:function(){this.stream.polygonStart()},polygonEnd:function(){this.stream.polygonEnd()}};var qv=16,Uv=hd(30*cd),Dv=function(t,n){return+n?Si(t,n):Ni(t)},Ov=wi({point:function(t,n){this.stream.point(t*cd,n*cd)}}),Fv=function(){return Ci(Pi).scale(155.424).center([0,33.6442])},Iv=function(){return Fv().parallels([29.5,45.5]).scale(1070).translate([480,250]).rotate([96,0]).center([-.6,38.7])},Yv=Li(function(t){return md(2/(1+t))});Yv.invert=qi(function(t){return 2*Ge(t/2)});var Bv=Li(function(t){return(t=Ze(t))&&t/yd(t)});Bv.invert=qi(function(t){return t});Ui.invert=function(t,n){return[t,2*fd(dd(n))-id]};Ii.invert=Ii;Bi.invert=qi(fd);Hi.invert=qi(Ge);Xi.invert=qi(function(t){return 2*fd(t)});$i.invert=function(t,n){return[-n,2*fd(dd(t))-id]};uo.prototype=eo.prototype={constructor:uo,count:function(){return this.eachAfter(to)},each:function(t){var n,e,r,i,o=this,u=[o];do{for(n=u.reverse(),u=[];o=n.pop();)if(t(o),e=o.children)for(r=0,i=e.length;r=0;--e)i.push(n[e]);return this},sum:function(t){return this.eachAfter(function(n){for(var e=+t(n.data)||0,r=n.children,i=r&&r.length;--i>=0;)e+=r[i].value;n.value=e})},sort:function(t){return this.eachBefore(function(n){n.children&&n.children.sort(t)})},path:function(t){for(var n=this,e=no(n,t),r=[n];n!==e;)n=n.parent,r.push(n);for(var i=r.length;t!==e;)r.splice(i,0,t),t=t.parent;return r},ancestors:function(){for(var t=this,n=[t];t=t.parent;)n.push(t);return n},descendants:function(){var t=[];return this.each(function(n){t.push(n)}),t},leaves:function(){var t=[];return this.eachBefore(function(n){n.children||t.push(n)}),t},links:function(){var t=this,n=[];return 
t.each(function(e){e!==t&&n.push({source:e.parent,target:e})}),n},copy:function(){return eo(this).eachBefore(io)}};var jv=Array.prototype.slice,Hv=function(t){for(var n,e,r=0,i=(t=ao(jv.call(t))).length,o=[];r1?n:1)},e}(Qv),t_=function t(n){function e(t,e,r,i,o){if((u=t._squarify)&&u.ratio===n)for(var u,a,c,s,f,l=-1,h=u.length,p=t.value;++l1?n:1)},e}(Qv),n_=function(t,n,e){return(n[0]-t[0])*(e[1]-t[1])-(n[1]-t[1])*(e[0]-t[0])},e_=[].slice,r_={};Bo.prototype=Wo.prototype={constructor:Bo,defer:function(t){if("function"!=typeof t)throw new Error("invalid callback");if(this._call)throw new Error("defer after await");if(null!=this._error)return this;var n=e_.call(arguments,1);return n.push(t),++this._waiting,this._tasks.push(n),jo(this),this},abort:function(){return null==this._error&&$o(this,new Error("abort")),this},await:function(t){if("function"!=typeof t)throw new Error("invalid callback");if(this._call)throw new Error("multiple await");return this._call=function(n,e){t.apply(null,[n].concat(e))},Vo(this),this},awaitAll:function(t){if("function"!=typeof t)throw new Error("invalid callback");if(this._call)throw new Error("multiple await");return this._call=t,Vo(this),this}};var i_=function(){return Math.random()},o_=function t(n){function e(t,e){return t=null==t?0:+t,e=null==e?1:+e,1===arguments.length?(e=t,t=0):e-=t,function(){return n()*e+t}}return e.source=t,e}(i_),u_=function t(n){function e(t,e){var r,i;return t=null==t?0:+t,e=null==e?1:+e,function(){var o;if(null!=r)o=r,r=null;else do{r=2*n()-1,o=2*n()-1,i=r*r+o*o}while(!i||i>1);return t+e*o*Math.sqrt(-2*Math.log(i)/i)}}return e.source=t,e}(i_),a_=function t(n){function e(){var t=u_.source(n).apply(this,arguments);return function(){return Math.exp(t())}}return e.source=t,e}(i_),c_=function t(n){function e(t){return function(){for(var e=0,r=0;r=200&&e<300||304===e){if(o)try{n=o.call(r,s)}catch(t){return void a.call("error",r,t)}else n=s;a.call("load",r,n)}else a.call("error",r,t)}var 
r,i,o,u,a=h("beforesend","progress","load","error"),c=we(),s=new XMLHttpRequest,f=null,l=null,p=0;if("undefined"==typeof XDomainRequest||"withCredentials"in s||!/^(http(s)?:)?\/\//.test(t)||(s=new XDomainRequest),"onload"in s?s.onload=s.onerror=s.ontimeout=e:s.onreadystatechange=function(t){s.readyState>3&&e(t)},s.onprogress=function(t){a.call("progress",r,t)},r={header:function(t,n){return t=(t+"").toLowerCase(),arguments.length<2?c.get(t):(null==n?c.remove(t):c.set(t,n+""),r)},mimeType:function(t){return arguments.length?(i=null==t?null:t+"",r):i},responseType:function(t){return arguments.length?(u=t,r):u},timeout:function(t){return arguments.length?(p=+t,r):p},user:function(t){return arguments.length<1?f:(f=null==t?null:t+"",r)},password:function(t){return arguments.length<1?l:(l=null==t?null:t+"",r)},response:function(t){return o=t,r},get:function(t,n){return r.send("GET",t,n)},post:function(t,n){return r.send("POST",t,n)},send:function(n,e,o){return s.open(n,t,!0,f,l),null==i||c.has("accept")||c.set("accept",i+",*/*"),s.setRequestHeader&&c.each(function(t,n){s.setRequestHeader(n,t)}),null!=i&&s.overrideMimeType&&s.overrideMimeType(i),null!=u&&(s.responseType=u),p>0&&(s.timeout=p),null==o&&"function"==typeof e&&(o=e,e=null),null!=o&&1===o.length&&(o=Zo(o)),null!=o&&r.on("error",o).on("load",function(t){o(null,t)}),a.call("beforesend",r,s),s.send(null==e?null:e),r},abort:function(){return s.abort(),r},on:function(){var t=a.on.apply(a,arguments);return t===a?r:t}},null!=n){if("function"!=typeof n)throw new Error("invalid callback: "+n);return r.get(n)}return r},h_=function(t,n){return function(e,r){var i=l_(e).mimeType(t).response(n);if(null!=r){if("function"!=typeof r)throw new Error("invalid callback: "+r);return i.get(r)}return i}},p_=h_("text/html",function(t){return document.createRange().createContextualFragment(t.responseText)}),d_=h_("application/json",function(t){return JSON.parse(t.responseText)}),v_=h_("text/plain",function(t){return 
t.responseText}),__=h_("application/xml",function(t){var n=t.responseXML;if(!n)throw new Error("parse error");return n}),y_=function(t,n){return function(e,r,i){arguments.length<3&&(i=r,r=null);var o=l_(e).mimeType(t);return o.row=function(t){return arguments.length?o.response(Jo(n,r=t)):r},o.row(r),i?o.get(i):o}},g_=y_("text/csv",Vh),m_=y_("text/tab-separated-values",Qh),x_=Array.prototype,b_=x_.map,w_=x_.slice,M_={name:"implicit"},T_=function(t){return function(){return t}},k_=function(t){return+t},N_=[0,1],S_=function(n,e,r){var o,u=n[0],a=n[n.length-1],c=i(u,a,null==e?10:e);switch((r=He(null==r?",f":r)).type){case"s":var s=Math.max(Math.abs(u),Math.abs(a));return null!=r.precision||isNaN(o=bp(c,s))||(r.precision=o),t.formatPrefix(r,s);case"":case"e":case"g":case"p":case"r":null!=r.precision||isNaN(o=wp(c,Math.max(Math.abs(u),Math.abs(a))))||(r.precision=o-("e"===r.type));break;case"f":case"%":null!=r.precision||isNaN(o=xp(c))||(r.precision=o-2*("%"===r.type))}return t.format(r)},E_=function(t,n){var e,r=0,i=(t=t.slice()).length-1,o=t[r],u=t[i];return u0?t>1?Mu(function(n){n.setTime(Math.floor(n/t)*t)},function(n,e){n.setTime(+n+e*t)},function(n,e){return(e-n)/t}):z_:null};var P_=z_.range,R_=6e4,L_=6048e5,q_=Mu(function(t){t.setTime(1e3*Math.floor(t/1e3))},function(t,n){t.setTime(+t+1e3*n)},function(t,n){return(n-t)/1e3},function(t){return t.getUTCSeconds()}),U_=q_.range,D_=Mu(function(t){t.setTime(Math.floor(t/R_)*R_)},function(t,n){t.setTime(+t+n*R_)},function(t,n){return(n-t)/R_},function(t){return t.getMinutes()}),O_=D_.range,F_=Mu(function(t){var n=t.getTimezoneOffset()*R_%36e5;n<0&&(n+=36e5),t.setTime(36e5*Math.floor((+t-n)/36e5)+n)},function(t,n){t.setTime(+t+36e5*n)},function(t,n){return(n-t)/36e5},function(t){return t.getHours()}),I_=F_.range,Y_=Mu(function(t){t.setHours(0,0,0,0)},function(t,n){t.setDate(t.getDate()+n)},function(t,n){return(n-t-(n.getTimezoneOffset()-t.getTimezoneOffset())*R_)/864e5},function(t){return 
t.getDate()-1}),B_=Y_.range,j_=Tu(0),H_=Tu(1),X_=Tu(2),$_=Tu(3),V_=Tu(4),W_=Tu(5),Z_=Tu(6),G_=j_.range,J_=H_.range,Q_=X_.range,K_=$_.range,ty=V_.range,ny=W_.range,ey=Z_.range,ry=Mu(function(t){t.setDate(1),t.setHours(0,0,0,0)},function(t,n){t.setMonth(t.getMonth()+n)},function(t,n){return n.getMonth()-t.getMonth()+12*(n.getFullYear()-t.getFullYear())},function(t){return t.getMonth()}),iy=ry.range,oy=Mu(function(t){t.setMonth(0,1),t.setHours(0,0,0,0)},function(t,n){t.setFullYear(t.getFullYear()+n)},function(t,n){return n.getFullYear()-t.getFullYear()},function(t){return t.getFullYear()});oy.every=function(t){return isFinite(t=Math.floor(t))&&t>0?Mu(function(n){n.setFullYear(Math.floor(n.getFullYear()/t)*t),n.setMonth(0,1),n.setHours(0,0,0,0)},function(n,e){n.setFullYear(n.getFullYear()+e*t)}):null};var uy=oy.range,ay=Mu(function(t){t.setUTCSeconds(0,0)},function(t,n){t.setTime(+t+n*R_)},function(t,n){return(n-t)/R_},function(t){return t.getUTCMinutes()}),cy=ay.range,sy=Mu(function(t){t.setUTCMinutes(0,0,0)},function(t,n){t.setTime(+t+36e5*n)},function(t,n){return(n-t)/36e5},function(t){return t.getUTCHours()}),fy=sy.range,ly=Mu(function(t){t.setUTCHours(0,0,0,0)},function(t,n){t.setUTCDate(t.getUTCDate()+n)},function(t,n){return(n-t)/864e5},function(t){return t.getUTCDate()-1}),hy=ly.range,py=ku(0),dy=ku(1),vy=ku(2),_y=ku(3),yy=ku(4),gy=ku(5),my=ku(6),xy=py.range,by=dy.range,wy=vy.range,My=_y.range,Ty=yy.range,ky=gy.range,Ny=my.range,Sy=Mu(function(t){t.setUTCDate(1),t.setUTCHours(0,0,0,0)},function(t,n){t.setUTCMonth(t.getUTCMonth()+n)},function(t,n){return n.getUTCMonth()-t.getUTCMonth()+12*(n.getUTCFullYear()-t.getUTCFullYear())},function(t){return t.getUTCMonth()}),Ey=Sy.range,Ay=Mu(function(t){t.setUTCMonth(0,1),t.setUTCHours(0,0,0,0)},function(t,n){t.setUTCFullYear(t.getUTCFullYear()+n)},function(t,n){return n.getUTCFullYear()-t.getUTCFullYear()},function(t){return t.getUTCFullYear()});Ay.every=function(t){return 
isFinite(t=Math.floor(t))&&t>0?Mu(function(n){n.setUTCFullYear(Math.floor(n.getUTCFullYear()/t)*t),n.setUTCMonth(0,1),n.setUTCHours(0,0,0,0)},function(n,e){n.setUTCFullYear(n.getUTCFullYear()+e*t)}):null};var Cy,zy=Ay.range,Py={"-":"",_:" ",0:"0"},Ry=/^\s*\d+/,Ly=/^%/,qy=/[\\\^\$\*\+\?\|\[\]\(\)\.\{\}]/g;Ma({dateTime:"%x, %X",date:"%-m/%-d/%Y",time:"%-I:%M:%S %p",periods:["AM","PM"],days:["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"],shortDays:["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],months:["January","February","March","April","May","June","July","August","September","October","November","December"],shortMonths:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"]});var Uy=Date.prototype.toISOString?function(t){return t.toISOString()}:t.utcFormat("%Y-%m-%dT%H:%M:%S.%LZ"),Dy=+new Date("2000-01-01T00:00:00.000Z")?function(t){var n=new Date(t);return isNaN(n)?null:n}:t.utcParse("%Y-%m-%dT%H:%M:%S.%LZ"),Oy=1e3,Fy=60*Oy,Iy=60*Fy,Yy=24*Iy,By=7*Yy,jy=30*Yy,Hy=365*Yy,Xy=function(t){return 
t.match(/.{6}/g).map(function(t){return"#"+t})},$y=Xy("1f77b4ff7f0e2ca02cd627289467bd8c564be377c27f7f7fbcbd2217becf"),Vy=Xy("393b795254a36b6ecf9c9ede6379398ca252b5cf6bcedb9c8c6d31bd9e39e7ba52e7cb94843c39ad494ad6616be7969c7b4173a55194ce6dbdde9ed6"),Wy=Xy("3182bd6baed69ecae1c6dbefe6550dfd8d3cfdae6bfdd0a231a35474c476a1d99bc7e9c0756bb19e9ac8bcbddcdadaeb636363969696bdbdbdd9d9d9"),Zy=Xy("1f77b4aec7e8ff7f0effbb782ca02c98df8ad62728ff98969467bdc5b0d58c564bc49c94e377c2f7b6d27f7f7fc7c7c7bcbd22dbdb8d17becf9edae5"),Gy=wl($t(300,.5,0),$t(-240,.5,1)),Jy=wl($t(-100,.75,.35),$t(80,1.5,.8)),Qy=wl($t(260,.75,.35),$t(80,1.5,.8)),Ky=$t(),tg=Sa(Xy("44015444025645045745055946075a46085c460a5d460b5e470d60470e6147106347116447136548146748166848176948186a481a6c481b6d481c6e481d6f481f70482071482173482374482475482576482677482878482979472a7a472c7a472d7b472e7c472f7d46307e46327e46337f463480453581453781453882443983443a83443b84433d84433e85423f854240864241864142874144874045884046883f47883f48893e49893e4a893e4c8a3d4d8a3d4e8a3c4f8a3c508b3b518b3b528b3a538b3a548c39558c39568c38588c38598c375a8c375b8d365c8d365d8d355e8d355f8d34608d34618d33628d33638d32648e32658e31668e31678e31688e30698e306a8e2f6b8e2f6c8e2e6d8e2e6e8e2e6f8e2d708e2d718e2c718e2c728e2c738e2b748e2b758e2a768e2a778e2a788e29798e297a8e297b8e287c8e287d8e277e8e277f8e27808e26818e26828e26828e25838e25848e25858e24868e24878e23888e23898e238a8d228b8d228c8d228d8d218e8d218f8d21908d21918c20928c20928c20938c1f948c1f958b1f968b1f978b1f988b1f998a1f9a8a1e9b8a1e9c891e9d891f9e891f9f881fa0881fa1881fa1871fa28720a38620a48621a58521a68522a78522a88423a98324aa8325ab8225ac8226ad8127ad8128ae8029af7f2ab07f2cb17e2db27d2eb37c2fb47c31b57b32b67a34b67935b77937b87838b9773aba763bbb753dbc743fbc7340bd7242be7144bf7046c06f48c16e4ac16d4cc26c4ec36b50c46a52c56954c56856c66758c7655ac8645cc8635ec96260ca6063cb5f65cb5e67cc5c69cd5b6ccd5a6ece5870cf5773d05675d05477d1537ad1517cd2507fd34e81d34d84d44b86d54989d5488bd6468ed64590d74393d74195d84098d83e9bd93c9dd93ba0da39a2da37a5db36a8db34aadc32addc30b0dd2fb2dd2db5d
e2bb8de29bade28bddf26c0df25c2df23c5e021c8e020cae11fcde11dd0e11cd2e21bd5e21ad8e219dae319dde318dfe318e2e418e5e419e7e419eae51aece51befe51cf1e51df4e61ef6e620f8e621fbe723fde725")),ng=Sa(Xy("00000401000501010601010802010902020b02020d03030f03031204041405041606051806051a07061c08071e0907200a08220b09240c09260d0a290e0b2b100b2d110c2f120d31130d34140e36150e38160f3b180f3d19103f1a10421c10441d11471e114920114b21114e22115024125325125527125829115a2a115c2c115f2d11612f116331116533106734106936106b38106c390f6e3b0f703d0f713f0f72400f74420f75440f764510774710784910784a10794c117a4e117b4f127b51127c52137c54137d56147d57157e59157e5a167e5c167f5d177f5f187f601880621980641a80651a80671b80681c816a1c816b1d816d1d816e1e81701f81721f817320817521817621817822817922827b23827c23827e24828025828125818326818426818627818827818928818b29818c29818e2a81902a81912b81932b80942c80962c80982d80992d809b2e7f9c2e7f9e2f7fa02f7fa1307ea3307ea5317ea6317da8327daa337dab337cad347cae347bb0357bb2357bb3367ab5367ab73779b83779ba3878bc3978bd3977bf3a77c03a76c23b75c43c75c53c74c73d73c83e73ca3e72cc3f71cd4071cf4070d0416fd2426fd3436ed5446dd6456cd8456cd9466bdb476adc4869de4968df4a68e04c67e24d66e34e65e44f64e55064e75263e85362e95462ea5661eb5760ec5860ed5a5fee5b5eef5d5ef05f5ef1605df2625df2645cf3655cf4675cf4695cf56b5cf66c5cf66e5cf7705cf7725cf8745cf8765cf9785df9795df97b5dfa7d5efa7f5efa815ffb835ffb8560fb8761fc8961fc8a62fc8c63fc8e64fc9065fd9266fd9467fd9668fd9869fd9a6afd9b6bfe9d6cfe9f6dfea16efea36ffea571fea772fea973feaa74feac76feae77feb078feb27afeb47bfeb67cfeb77efeb97ffebb81febd82febf84fec185fec287fec488fec68afec88cfeca8dfecc8ffecd90fecf92fed194fed395fed597fed799fed89afdda9cfddc9efddea0fde0a1fde2a3fde3a5fde5a7fde7a9fde9aafdebacfcecaefceeb0fcf0b2fcf2b4fcf4b6fcf6b8fcf7b9fcf9bbfcfbbdfcfdbf")),eg=Sa(Xy("00000401000501010601010802010a02020c02020e03021004031204031405041706041907051b08051d09061f0a07220b07240c08260d08290e092b10092d110a30120a32140b34150b37160b39180c3c190c3e1b0c411c0c431e0c451f0c48210c4a230c4c240c4f260c51280b53290b552b0b572d0b592f0a5b310a5c320a5e340a5f3
609613809623909633b09643d09653e0966400a67420a68440a68450a69470b6a490b6a4a0c6b4c0c6b4d0d6c4f0d6c510e6c520e6d540f6d550f6d57106e59106e5a116e5c126e5d126e5f136e61136e62146e64156e65156e67166e69166e6a176e6c186e6d186e6f196e71196e721a6e741a6e751b6e771c6d781c6d7a1d6d7c1d6d7d1e6d7f1e6c801f6c82206c84206b85216b87216b88226a8a226a8c23698d23698f24699025689225689326679526679727669827669a28659b29649d29649f2a63a02a63a22b62a32c61a52c60a62d60a82e5fa92e5eab2f5ead305dae305cb0315bb1325ab3325ab43359b63458b73557b93556ba3655bc3754bd3853bf3952c03a51c13a50c33b4fc43c4ec63d4dc73e4cc83f4bca404acb4149cc4248ce4347cf4446d04545d24644d34743d44842d54a41d74b3fd84c3ed94d3dda4e3cdb503bdd513ade5238df5337e05536e15635e25734e35933e45a31e55c30e65d2fe75e2ee8602de9612bea632aeb6429eb6628ec6726ed6925ee6a24ef6c23ef6e21f06f20f1711ff1731df2741cf3761bf37819f47918f57b17f57d15f67e14f68013f78212f78410f8850ff8870ef8890cf98b0bf98c0af98e09fa9008fa9207fa9407fb9606fb9706fb9906fb9b06fb9d07fc9f07fca108fca309fca50afca60cfca80dfcaa0ffcac11fcae12fcb014fcb216fcb418fbb61afbb81dfbba1ffbbc21fbbe23fac026fac228fac42afac62df9c72ff9c932f9cb35f8cd37f8cf3af7d13df7d340f6d543f6d746f5d949f5db4cf4dd4ff4df53f4e156f3e35af3e55df2e661f2e865f2ea69f1ec6df1ed71f1ef75f1f179f2f27df2f482f3f586f3f68af4f88ef5f992f6fa96f8fb9af9fc9dfafda1fcffa4")),rg=Sa(Xy("0d088710078813078916078a19068c1b068d1d068e20068f2206902406912605912805922a05932c05942e05952f059631059733059735049837049938049a3a049a3c049b3e049c3f049c41049d43039e44039e46039f48039f4903a04b03a14c02a14e02a25002a25102a35302a35502a45601a45801a45901a55b01a55c01a65e01a66001a66100a76300a76400a76600a76700a86900a86a00a86c00a86e00a86f00a87100a87201a87401a87501a87701a87801a87a02a87b02a87d03a87e03a88004a88104a78305a78405a78606a68707a68808a68a09a58b0aa58d0ba58e0ca48f0da4910ea3920fa39410a29511a19613a19814a099159f9a169f9c179e9d189d9e199da01a9ca11b9ba21d9aa31e9aa51f99a62098a72197a82296aa2395ab2494ac2694ad2793ae2892b02991b12a90b22b8fb32c8eb42e8db52f8cb6308bb7318ab83289ba3388bb3488bc3587bd3786be3885bf3984c03a83c13b82c23c81c
33d80c43e7fc5407ec6417dc7427cc8437bc9447aca457acb4679cc4778cc4977cd4a76ce4b75cf4c74d04d73d14e72d24f71d35171d45270d5536fd5546ed6556dd7566cd8576bd9586ada5a6ada5b69db5c68dc5d67dd5e66de5f65de6164df6263e06363e16462e26561e26660e3685fe4695ee56a5de56b5de66c5ce76e5be76f5ae87059e97158e97257ea7457eb7556eb7655ec7754ed7953ed7a52ee7b51ef7c51ef7e50f07f4ff0804ef1814df1834cf2844bf3854bf3874af48849f48948f58b47f58c46f68d45f68f44f79044f79143f79342f89441f89540f9973ff9983ef99a3efa9b3dfa9c3cfa9e3bfb9f3afba139fba238fca338fca537fca636fca835fca934fdab33fdac33fdae32fdaf31fdb130fdb22ffdb42ffdb52efeb72dfeb82cfeba2cfebb2bfebd2afebe2afec029fdc229fdc328fdc527fdc627fdc827fdca26fdcb26fccd25fcce25fcd025fcd225fbd324fbd524fbd724fad824fada24f9dc24f9dd25f8df25f8e125f7e225f7e425f6e626f6e826f5e926f5eb27f4ed27f3ee27f3f027f2f227f1f426f1f525f0f724f0f921")),ig=function(t){return function(){return t}},og=Math.abs,ug=Math.atan2,ag=Math.cos,cg=Math.max,sg=Math.min,fg=Math.sin,lg=Math.sqrt,hg=1e-12,pg=Math.PI,dg=pg/2,vg=2*pg;Oa.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._point=0},lineEnd:function(){(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2;default:this._context.lineTo(t,n)}}};var _g=function(t){return new Oa(t)},yg=function(){function t(t){var a,c,s,f=t.length,l=!1;for(null==i&&(u=o(s=ve())),a=0;a<=f;++a)!(a=f;--l)s.point(_[l],y[l]);s.lineEnd(),s.areaEnd()}v&&(_[n]=+e(h,n,t),y[n]=+i(h,n,t),s.point(r?+r(h,n,t):_[n],o?+o(h,n,t):y[n]))}if(p)return s=null,p+""||null}function n(){return yg().defined(u).curve(c).context(a)}var e=Fa,r=null,i=ig(0),o=Ia,u=ig(!0),a=null,c=_g,s=null;return t.x=function(n){return arguments.length?(e="function"==typeof n?n:ig(+n),r=null,t):e},t.x0=function(n){return arguments.length?(e="function"==typeof 
n?n:ig(+n),t):e},t.x1=function(n){return arguments.length?(r=null==n?null:"function"==typeof n?n:ig(+n),t):r},t.y=function(n){return arguments.length?(i="function"==typeof n?n:ig(+n),o=null,t):i},t.y0=function(n){return arguments.length?(i="function"==typeof n?n:ig(+n),t):i},t.y1=function(n){return arguments.length?(o=null==n?null:"function"==typeof n?n:ig(+n),t):o},t.lineX0=t.lineY0=function(){return n().x(e).y(i)},t.lineY1=function(){return n().x(e).y(o)},t.lineX1=function(){return n().x(r).y(i)},t.defined=function(n){return arguments.length?(u="function"==typeof n?n:ig(!!n),t):u},t.curve=function(n){return arguments.length?(c=n,null!=a&&(s=c(a)),t):c},t.context=function(n){return arguments.length?(null==n?a=s=null:s=c(a=n),t):a},t},mg=function(t,n){return nt?1:n>=t?0:NaN},xg=function(t){return t},bg=Ba(_g);Ya.prototype={areaStart:function(){this._curve.areaStart()},areaEnd:function(){this._curve.areaEnd()},lineStart:function(){this._curve.lineStart()},lineEnd:function(){this._curve.lineEnd()},point:function(t,n){this._curve.point(n*Math.sin(t),n*-Math.cos(t))}};var wg=function(){return ja(yg().curve(bg))},Mg=function(){var t=gg().curve(bg),n=t.curve,e=t.lineX0,r=t.lineX1,i=t.lineY0,o=t.lineY1;return t.angle=t.x,delete t.x,t.startAngle=t.x0,delete t.x0,t.endAngle=t.x1,delete t.x1,t.radius=t.y,delete t.y,t.innerRadius=t.y0,delete t.y0,t.outerRadius=t.y1,delete t.y1,t.lineStartAngle=function(){return ja(e())},delete t.lineX0,t.lineEndAngle=function(){return ja(r())},delete t.lineX1,t.lineInnerRadius=function(){return ja(i())},delete t.lineY0,t.lineOuterRadius=function(){return ja(o())},delete t.lineY1,t.curve=function(t){return arguments.length?n(Ba(t)):n()._curve},t},Tg=function(t,n){return[(n=+n)*Math.cos(t-=Math.PI/2),n*Math.sin(t)]},kg=Array.prototype.slice,Ng={draw:function(t,n){var e=Math.sqrt(n/pg);t.moveTo(e,0),t.arc(0,0,e,0,vg)}},Sg={draw:function(t,n){var 
e=Math.sqrt(n/5)/2;t.moveTo(-3*e,-e),t.lineTo(-e,-e),t.lineTo(-e,-3*e),t.lineTo(e,-3*e),t.lineTo(e,-e),t.lineTo(3*e,-e),t.lineTo(3*e,e),t.lineTo(e,e),t.lineTo(e,3*e),t.lineTo(-e,3*e),t.lineTo(-e,e),t.lineTo(-3*e,e),t.closePath()}},Eg=Math.sqrt(1/3),Ag=2*Eg,Cg={draw:function(t,n){var e=Math.sqrt(n/Ag),r=e*Eg;t.moveTo(0,-e),t.lineTo(r,0),t.lineTo(0,e),t.lineTo(-r,0),t.closePath()}},zg=Math.sin(pg/10)/Math.sin(7*pg/10),Pg=Math.sin(vg/10)*zg,Rg=-Math.cos(vg/10)*zg,Lg={draw:function(t,n){var e=Math.sqrt(.8908130915292852*n),r=Pg*e,i=Rg*e;t.moveTo(0,-e),t.lineTo(r,i);for(var o=1;o<5;++o){var u=vg*o/5,a=Math.cos(u),c=Math.sin(u);t.lineTo(c*e,-a*e),t.lineTo(a*r-c*i,c*r+a*i)}t.closePath()}},qg={draw:function(t,n){var e=Math.sqrt(n),r=-e/2;t.rect(r,r,e,e)}},Ug=Math.sqrt(3),Dg={draw:function(t,n){var e=-Math.sqrt(n/(3*Ug));t.moveTo(0,2*e),t.lineTo(-Ug*e,-e),t.lineTo(Ug*e,-e),t.closePath()}},Og=-.5,Fg=Math.sqrt(3)/2,Ig=1/Math.sqrt(12),Yg=3*(Ig/2+1),Bg={draw:function(t,n){var e=Math.sqrt(n/Yg),r=e/2,i=e*Ig,o=r,u=e*Ig+e,a=-o,c=u;t.moveTo(r,i),t.lineTo(o,u),t.lineTo(a,c),t.lineTo(Og*r-Fg*i,Fg*r+Og*i),t.lineTo(Og*o-Fg*u,Fg*o+Og*u),t.lineTo(Og*a-Fg*c,Fg*a+Og*c),t.lineTo(Og*r+Fg*i,Og*i-Fg*r),t.lineTo(Og*o+Fg*u,Og*u-Fg*o),t.lineTo(Og*a+Fg*c,Og*c-Fg*a),t.closePath()}},jg=[Ng,Sg,Cg,qg,Lg,Dg,Bg],Hg=function(){};Ja.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._y0=this._y1=NaN,this._point=0},lineEnd:function(){switch(this._point){case 3:Ga(this,this._x1,this._y1);case 2:this._context.lineTo(this._x1,this._y1)}(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2;break;case 
2:this._point=3,this._context.lineTo((5*this._x0+this._x1)/6,(5*this._y0+this._y1)/6);default:Ga(this,t,n)}this._x0=this._x1,this._x1=t,this._y0=this._y1,this._y1=n}};Qa.prototype={areaStart:Hg,areaEnd:Hg,lineStart:function(){this._x0=this._x1=this._x2=this._x3=this._x4=this._y0=this._y1=this._y2=this._y3=this._y4=NaN,this._point=0},lineEnd:function(){switch(this._point){case 1:this._context.moveTo(this._x2,this._y2),this._context.closePath();break;case 2:this._context.moveTo((this._x2+2*this._x3)/3,(this._y2+2*this._y3)/3),this._context.lineTo((this._x3+2*this._x2)/3,(this._y3+2*this._y2)/3),this._context.closePath();break;case 3:this.point(this._x2,this._y2),this.point(this._x3,this._y3),this.point(this._x4,this._y4)}},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._x2=t,this._y2=n;break;case 1:this._point=2,this._x3=t,this._y3=n;break;case 2:this._point=3,this._x4=t,this._y4=n,this._context.moveTo((this._x0+4*this._x1+t)/6,(this._y0+4*this._y1+n)/6);break;default:Ga(this,t,n)}this._x0=this._x1,this._x1=t,this._y0=this._y1,this._y1=n}};Ka.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._y0=this._y1=NaN,this._point=0},lineEnd:function(){(this._line||0!==this._line&&3===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1;break;case 1:this._point=2;break;case 2:this._point=3;var e=(this._x0+4*this._x1+t)/6,r=(this._y0+4*this._y1+n)/6;this._line?this._context.lineTo(e,r):this._context.moveTo(e,r);break;case 3:this._point=4;default:Ga(this,t,n)}this._x0=this._x1,this._x1=t,this._y0=this._y1,this._y1=n}};tc.prototype={lineStart:function(){this._x=[],this._y=[],this._basis.lineStart()},lineEnd:function(){var t=this._x,n=this._y,e=t.length-1;if(e>0)for(var 
r,i=t[0],o=n[0],u=t[e]-i,a=n[e]-o,c=-1;++c<=e;)r=c/e,this._basis.point(this._beta*t[c]+(1-this._beta)*(i+r*u),this._beta*n[c]+(1-this._beta)*(o+r*a));this._x=this._y=null,this._basis.lineEnd()},point:function(t,n){this._x.push(+t),this._y.push(+n)}};var Xg=function t(n){function e(t){return 1===n?new Ja(t):new tc(t,n)}return e.beta=function(n){return t(+n)},e}(.85);ec.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._x2=this._y0=this._y1=this._y2=NaN,this._point=0},lineEnd:function(){switch(this._point){case 2:this._context.lineTo(this._x2,this._y2);break;case 3:nc(this,this._x1,this._y1)}(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2,this._x1=t,this._y1=n;break;case 2:this._point=3;default:nc(this,t,n)}this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var $g=function t(n){function e(t){return new ec(t,n)}return e.tension=function(n){return t(+n)},e}(0);rc.prototype={areaStart:Hg,areaEnd:Hg,lineStart:function(){this._x0=this._x1=this._x2=this._x3=this._x4=this._x5=this._y0=this._y1=this._y2=this._y3=this._y4=this._y5=NaN,this._point=0},lineEnd:function(){switch(this._point){case 1:this._context.moveTo(this._x3,this._y3),this._context.closePath();break;case 2:this._context.lineTo(this._x3,this._y3),this._context.closePath();break;case 3:this.point(this._x3,this._y3),this.point(this._x4,this._y4),this.point(this._x5,this._y5)}},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._x3=t,this._y3=n;break;case 1:this._point=2,this._context.moveTo(this._x4=t,this._y4=n);break;case 
2:this._point=3,this._x5=t,this._y5=n;break;default:nc(this,t,n)}this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var Vg=function t(n){function e(t){return new rc(t,n)}return e.tension=function(n){return t(+n)},e}(0);ic.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._x2=this._y0=this._y1=this._y2=NaN,this._point=0},lineEnd:function(){(this._line||0!==this._line&&3===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1;break;case 1:this._point=2;break;case 2:this._point=3,this._line?this._context.lineTo(this._x2,this._y2):this._context.moveTo(this._x2,this._y2);break;case 3:this._point=4;default:nc(this,t,n)}this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var Wg=function t(n){function e(t){return new ic(t,n)}return e.tension=function(n){return t(+n)},e}(0);uc.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._x2=this._y0=this._y1=this._y2=NaN,this._l01_a=this._l12_a=this._l23_a=this._l01_2a=this._l12_2a=this._l23_2a=this._point=0},lineEnd:function(){switch(this._point){case 2:this._context.lineTo(this._x2,this._y2);break;case 3:this.point(this._x2,this._y2)}(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){if(t=+t,n=+n,this._point){var e=this._x2-t,r=this._y2-n;this._l23_a=Math.sqrt(this._l23_2a=Math.pow(e*e+r*r,this._alpha))}switch(this._point){case 0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2;break;case 
2:this._point=3;default:oc(this,t,n)}this._l01_a=this._l12_a,this._l12_a=this._l23_a,this._l01_2a=this._l12_2a,this._l12_2a=this._l23_2a,this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var Zg=function t(n){function e(t){return n?new uc(t,n):new ec(t,0)}return e.alpha=function(n){return t(+n)},e}(.5);ac.prototype={areaStart:Hg,areaEnd:Hg,lineStart:function(){this._x0=this._x1=this._x2=this._x3=this._x4=this._x5=this._y0=this._y1=this._y2=this._y3=this._y4=this._y5=NaN,this._l01_a=this._l12_a=this._l23_a=this._l01_2a=this._l12_2a=this._l23_2a=this._point=0},lineEnd:function(){switch(this._point){case 1:this._context.moveTo(this._x3,this._y3),this._context.closePath();break;case 2:this._context.lineTo(this._x3,this._y3),this._context.closePath();break;case 3:this.point(this._x3,this._y3),this.point(this._x4,this._y4),this.point(this._x5,this._y5)}},point:function(t,n){if(t=+t,n=+n,this._point){var e=this._x2-t,r=this._y2-n;this._l23_a=Math.sqrt(this._l23_2a=Math.pow(e*e+r*r,this._alpha))}switch(this._point){case 0:this._point=1,this._x3=t,this._y3=n;break;case 1:this._point=2,this._context.moveTo(this._x4=t,this._y4=n);break;case 2:this._point=3,this._x5=t,this._y5=n;break;default:oc(this,t,n)}this._l01_a=this._l12_a,this._l12_a=this._l23_a,this._l01_2a=this._l12_2a,this._l12_2a=this._l23_2a,this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var Gg=function t(n){function e(t){return n?new ac(t,n):new rc(t,0)}return e.alpha=function(n){return t(+n)},e}(.5);cc.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._x2=this._y0=this._y1=this._y2=NaN,this._l01_a=this._l12_a=this._l23_a=this._l01_2a=this._l12_2a=this._l23_2a=this._point=0},lineEnd:function(){(this._line||0!==this._line&&3===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){if(t=+t,n=+n,this._point){var 
e=this._x2-t,r=this._y2-n;this._l23_a=Math.sqrt(this._l23_2a=Math.pow(e*e+r*r,this._alpha))}switch(this._point){case 0:this._point=1;break;case 1:this._point=2;break;case 2:this._point=3,this._line?this._context.lineTo(this._x2,this._y2):this._context.moveTo(this._x2,this._y2);break;case 3:this._point=4;default:oc(this,t,n)}this._l01_a=this._l12_a,this._l12_a=this._l23_a,this._l01_2a=this._l12_2a,this._l12_2a=this._l23_2a,this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=n}};var Jg=function t(n){function e(t){return n?new cc(t,n):new ic(t,0)}return e.alpha=function(n){return t(+n)},e}(.5);sc.prototype={areaStart:Hg,areaEnd:Hg,lineStart:function(){this._point=0},lineEnd:function(){this._point&&this._context.closePath()},point:function(t,n){t=+t,n=+n,this._point?this._context.lineTo(t,n):(this._point=1,this._context.moveTo(t,n))}};dc.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._y0=this._y1=this._t0=NaN,this._point=0},lineEnd:function(){switch(this._point){case 2:this._context.lineTo(this._x1,this._y1);break;case 3:pc(this,this._t0,hc(this,this._t0))}(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(t,n){var e=NaN;if(t=+t,n=+n,t!==this._x1||n!==this._y1){switch(this._point){case 0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2;break;case 
2:this._point=3,pc(this,hc(this,e=lc(this,t,n)),e);break;default:pc(this,this._t0,e=lc(this,t,n))}this._x0=this._x1,this._x1=t,this._y0=this._y1,this._y1=n,this._t0=e}}},(vc.prototype=Object.create(dc.prototype)).point=function(t,n){dc.prototype.point.call(this,n,t)},_c.prototype={moveTo:function(t,n){this._context.moveTo(n,t)},closePath:function(){this._context.closePath()},lineTo:function(t,n){this._context.lineTo(n,t)},bezierCurveTo:function(t,n,e,r,i,o){this._context.bezierCurveTo(n,t,r,e,o,i)}},yc.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x=[],this._y=[]},lineEnd:function(){var t=this._x,n=this._y,e=t.length;if(e)if(this._line?this._context.lineTo(t[0],n[0]):this._context.moveTo(t[0],n[0]),2===e)this._context.lineTo(t[1],n[1]);else for(var r=gc(t),i=gc(n),o=0,u=1;u=0&&(this._t=1-this._t,this._line=1-this._line)},point:function(t,n){switch(t=+t,n=+n,this._point){case 0:this._point=1,this._line?this._context.lineTo(t,n):this._context.moveTo(t,n);break;case 1:this._point=2;default:if(this._t<=0)this._context.lineTo(this._x,n),this._context.lineTo(t,n);else{var e=this._x*(1-this._t)+t*this._t;this._context.lineTo(e,this._y),this._context.lineTo(e,n)}}this._x=t,this._y=n}};var Qg=function(t,n){if((i=t.length)>1)for(var e,r,i,o=1,u=t[n[0]],a=u.length;o=0;)e[n]=n;return e},tm=function(t){var n=t.map(bc);return Kg(t).sort(function(t,e){return n[t]-n[e]})},nm=function(t){return function(){return t}};Tc.prototype={constructor:Tc,insert:function(t,n){var e,r,i;if(t){if(n.P=t,n.N=t.N,t.N&&(t.N.P=n),t.N=n,t.R){for(t=t.R;t.L;)t=t.L;t.L=n}else t.R=n;e=t}else 
this._?(t=Ec(this._),n.P=null,n.N=t,t.P=t.L=n,e=t):(n.P=n.N=null,this._=n,e=null);for(n.L=n.R=null,n.U=e,n.C=!0,t=n;e&&e.C;)e===(r=e.U).L?(i=r.R)&&i.C?(e.C=i.C=!1,r.C=!0,t=r):(t===e.R&&(Nc(this,e),e=(t=e).U),e.C=!1,r.C=!0,Sc(this,r)):(i=r.L)&&i.C?(e.C=i.C=!1,r.C=!0,t=r):(t===e.L&&(Sc(this,e),e=(t=e).U),e.C=!1,r.C=!0,Nc(this,r)),e=t.U;this._.C=!1},remove:function(t){t.N&&(t.N.P=t.P),t.P&&(t.P.N=t.N),t.N=t.P=null;var n,e,r,i=t.U,o=t.L,u=t.R;if(e=o?u?Ec(u):o:u,i?i.L===t?i.L=e:i.R=e:this._=e,o&&u?(r=e.C,e.C=t.C,e.L=o,o.U=e,e!==u?(i=e.U,e.U=t.U,t=e.R,i.L=t,e.R=u,u.U=e):(e.U=i,i=e,t=e.R)):(r=t.C,t=e),t&&(t.U=i),!r)if(t&&t.C)t.C=!1;else{do{if(t===this._)break;if(t===i.L){if((n=i.R).C&&(n.C=!1,i.C=!0,Nc(this,i),n=i.R),n.L&&n.L.C||n.R&&n.R.C){n.R&&n.R.C||(n.L.C=!1,n.C=!0,Sc(this,n),n=i.R),n.C=i.C,i.C=n.R.C=!1,Nc(this,i),t=this._;break}}else if((n=i.L).C&&(n.C=!1,i.C=!0,Sc(this,i),n=i.L),n.L&&n.L.C||n.R&&n.R.C){n.L&&n.L.C||(n.R.C=!1,n.C=!0,Nc(this,n),n=i.L),n.C=i.C,i.C=n.L.C=!1,Sc(this,i),t=this._;break}n.C=!0,t=i,i=i.U}while(!t.C);t&&(t.C=!1)}}};var em,rm,im,om,um,am=[],cm=[],sm=1e-6,fm=1e-12;Kc.prototype={constructor:Kc,polygons:function(){var t=this.edges;return this.cells.map(function(n){var e=n.halfedges.map(function(e){return Dc(n,t[e])});return e.data=n.site.data,e})},triangles:function(){var t=[],n=this.edges;return this.cells.forEach(function(e,r){if(o=(i=e.halfedges).length)for(var i,o,u,a=e.site,c=-1,s=n[i[o-1]],f=s.left===a?s.right:s.left;++c=a)return null;var c=t-i.site[0],s=n-i.site[1],f=c*c+s*s;do{i=o.cells[r=u],u=null,i.halfedges.forEach(function(e){var r=o.edges[e],a=r.left;if(a!==i.site&&a||(a=r.right)){var c=t-a[0],s=n-a[1],l=c*c+s*s;lt?1:n>=t?0:NaN},t.deviation=_s,t.extent=ys,t.histogram=function(){function t(t){var o,u,a=t.length,c=new Array(a);for(o=0;ol;)h.pop(),--p;var d,v=new Array(p+1);for(o=0;o<=p;++o)(d=v[o]=[]).x0=o>0?h[o-1]:f,d.x1=o=e)for(r=e;++or&&(r=e)}else for(;++o=e)for(r=e;++or&&(r=e);return r},t.mean=function(t,n){var 
e,r=t.length,i=r,o=-1,u=0;if(null==n)for(;++o=o.length)return null!=e&&n.sort(e),null!=r?r(n):n;for(var c,s,f,l=-1,h=n.length,p=o[i++],d=we(),v=u();++lo.length)return t;var i,a=u[e-1];return null!=r&&e>=o.length?i=t.entries():(i=[],t.each(function(t,r){i.push({key:r,values:n(t,e)})})),null!=a?i.sort(function(t,n){return a(t.key,n.key)}):i}var e,r,i,o=[],u=[];return i={object:function(n){return t(n,0,Me,Te)},map:function(n){return t(n,0,ke,Ne)},entries:function(e){return n(t(e,0,ke,Ne),0)},key:function(t){return o.push(t),i},sortKeys:function(t){return u[o.length-1]=t,i},sortValues:function(t){return e=t,i},rollup:function(t){return r=t,i}}},t.set=Ee,t.map=we,t.keys=function(t){var n=[];for(var e in t)n.push(e);return n},t.values=function(t){var n=[];for(var e in t)n.push(t[e]);return n},t.entries=function(t){var n=[];for(var e in t)n.push({key:e,value:t[e]});return n},t.color=Tt,t.rgb=Et,t.hsl=Pt,t.lab=Ut,t.hcl=jt,t.cubehelix=$t,t.dispatch=h,t.drag=function(){function n(t){t.on("mousedown.drag",e).filter(bt).on("touchstart.drag",o).on("touchmove.drag",u).on("touchend.drag touchcancel.drag",a).style("touch-action","none").style("-webkit-tap-highlight-color","rgba(0,0,0,0)")}function e(){if(!p&&d.apply(this,arguments)){var n=c("mouse",v.apply(this,arguments),Ks,this,arguments);n&&(cf(t.event.view).on("mousemove.drag",r,!0).on("mouseup.drag",i,!0),lf(t.event.view),vt(),l=!1,s=t.event.clientX,f=t.event.clientY,n("start"))}}function r(){if(ff(),!l){var n=t.event.clientX-s,e=t.event.clientY-f;l=n*n+e*e>x}y.mouse("drag")}function i(){cf(t.event.view).on("mousemove.drag mouseup.drag",null),_t(t.event.view,l),ff(),y.mouse("end")}function o(){if(d.apply(this,arguments)){var n,e,r=t.event.changedTouches,i=v.apply(this,arguments),o=r.length;for(n=0;nc+p||is+p||or.index){var d=c-a.x-a.vx,v=s-a.y-a.vy,_=d*d+v*v;_t.r&&(t.r=t[n].r)}function r(){if(i){var n,e,r=i.length;for(o=new 
Array(r),n=0;n=f)){(t.data!==o||t.next)&&(0===i&&(i=rp(),p+=i*i),0===c&&(c=rp(),p+=c*c),p1?(null==n?l.remove(t):l.set(t,i(n)),o):l.get(t)},find:function(n,e,r){var i,o,u,a,c,s=0,f=t.length;for(null==r?r=1/0:r*=r,s=0;s1?(d.on(t,n),o):d.on(t)}}},t.forceX=function(t){function n(t){for(var n,e=0,u=r.length;exr(r[0],r[1])&&(r[1]=i[1]),xr(i[0],r[1])>xr(r[0],r[1])&&(r[0]=i[0])):o.push(r=i);for(u=-1/0,n=0,r=o[e=o.length-1];n<=e;r=i,++n)i=o[n],(a=xr(r[1],i[0]))>u&&(u=a,Ap=i[0],zp=r[1])}return Dp=Op=null,Ap===1/0||Cp===1/0?[[NaN,NaN],[NaN,NaN]]:[[Ap,Cp],[zp,Pp]]},t.geoCentroid=function(t){Fp=Ip=Yp=Bp=jp=Hp=Xp=$p=Vp=Wp=Zp=0,Md(t,Ad);var n=Vp,e=Wp,r=Zp,i=n*n+e*e+r*r;return i<1e-12&&(n=Hp,e=Xp,r=$p,Ip=.12&&i<.234&&r>=-.425&&r<-.214?s:i>=.166&&i<.234&&r>=-.214&&r<-.115?f:c).invert(t)},t.stream=function(t){return e&&r===t?e:e=Ri([c.stream(r=t),s.stream(t),f.stream(t)])},t.precision=function(t){return arguments.length?(c.precision(t),s.precision(t),f.precision(t),n()):c.precision()},t.scale=function(n){return arguments.length?(c.scale(n),s.scale(.35*n),f.scale(n),t.translate(c.translate())):c.scale()},t.translate=function(t){if(!arguments.length)return c.translate();var e=c.scale(),r=+t[0],a=+t[1];return i=c.translate(t).clipExtent([[r-.455*e,a-.238*e],[r+.455*e,a+.238*e]]).stream(l),o=s.translate([r-.307*e,a+.201*e]).clipExtent([[r-.425*e+ed,a+.12*e+ed],[r-.214*e-ed,a+.234*e-ed]]).stream(l),u=f.translate([r-.205*e,a+.212*e]).clipExtent([[r-.214*e+ed,a+.166*e+ed],[r-.115*e-ed,a+.234*e-ed]]).stream(l),n()},t.fitExtent=function(n,e){return Ti(t,n,e)},t.fitSize=function(n,e){return ki(t,n,e)},t.scale(1070)},t.geoAzimuthalEqualArea=function(){return Ei(Yv).scale(124.75).clipAngle(179.999)},t.geoAzimuthalEqualAreaRaw=Yv,t.geoAzimuthalEquidistant=function(){return Ei(Bv).scale(79.4188).clipAngle(179.999)},t.geoAzimuthalEquidistantRaw=Bv,t.geoConicConformal=function(){return 
Ci(Fi).scale(109.5).parallels([30,30])},t.geoConicConformalRaw=Fi,t.geoConicEqualArea=Fv,t.geoConicEqualAreaRaw=Pi,t.geoConicEquidistant=function(){return Ci(Yi).scale(131.154).center([0,13.9389])},t.geoConicEquidistantRaw=Yi,t.geoEquirectangular=function(){return Ei(Ii).scale(152.63)},t.geoEquirectangularRaw=Ii,t.geoGnomonic=function(){return Ei(Bi).scale(144.049).clipAngle(60)},t.geoGnomonicRaw=Bi,t.geoIdentity=function(){function t(){return i=o=null,u}var n,e,r,i,o,u,a=1,c=0,s=0,f=1,l=1,h=uv,p=null,d=uv;return u={stream:function(t){return i&&o===t?i:i=h(d(o=t))},clipExtent:function(i){return arguments.length?(d=null==i?(p=n=e=r=null,uv):Br(p=+i[0][0],n=+i[0][1],e=+i[1][0],r=+i[1][1]),t()):null==p?null:[[p,n],[e,r]]},scale:function(n){return arguments.length?(h=ji((a=+n)*f,a*l,c,s),t()):a},translate:function(n){return arguments.length?(h=ji(a*f,a*l,c=+n[0],s=+n[1]),t()):[c,s]},reflectX:function(n){return arguments.length?(h=ji(a*(f=n?-1:1),a*l,c,s),t()):f<0},reflectY:function(n){return arguments.length?(h=ji(a*f,a*(l=n?-1:1),c,s),t()):l<0},fitExtent:function(t,n){return Ti(u,t,n)},fitSize:function(t,n){return ki(u,t,n)}}},t.geoProjection=Ei,t.geoProjectionMutator=Ai,t.geoMercator=function(){return Di(Ui).scale(961/ud)},t.geoMercatorRaw=Ui,t.geoOrthographic=function(){return Ei(Hi).scale(249.5).clipAngle(90+ed)},t.geoOrthographicRaw=Hi,t.geoStereographic=function(){return Ei(Xi).scale(250).clipAngle(142)},t.geoStereographicRaw=Xi,t.geoTransverseMercator=function(){var t=Di($i),n=t.center,e=t.rotate;return t.center=function(t){return arguments.length?n([-t[1],t[0]]):(t=n(),[t[1],-t[0]])},t.rotate=function(t){return arguments.length?e([t[0],t[1],t.length>2?t[2]+90:90]):(t=e(),[t[0],t[1],t[2]-90])},e([0,0,90]).scale(159.155)},t.geoTransverseMercatorRaw=$i,t.geoRotation=jd,t.geoStream=Md,t.geoTransform=function(t){return{stream:wi(t)}},t.cluster=function(){function t(t){var o,u=0;t.eachAfter(function(t){var 
e=t.children;e?(t.x=Wi(e),t.y=Gi(e)):(t.x=o?u+=n(t,o):0,t.y=0,o=t)});var a=Qi(t),c=Ki(t),s=a.x-n(a,c)/2,f=c.x+n(c,a)/2;return t.eachAfter(i?function(n){n.x=(n.x-t.x)*e,n.y=(t.y-n.y)*r}:function(n){n.x=(n.x-s)/(f-s)*e,n.y=(1-(t.y?n.y/t.y:1))*r})}var n=Vi,e=1,r=1,i=!1;return t.separation=function(e){return arguments.length?(n=e,t):n},t.size=function(n){return arguments.length?(i=!1,e=+n[0],r=+n[1],t):i?null:[e,r]},t.nodeSize=function(n){return arguments.length?(i=!0,e=+n[0],r=+n[1],t):i?[e,r]:null},t},t.hierarchy=eo,t.pack=function(){function t(t){return t.x=e/2,t.y=r/2,n?t.eachBefore(No(n)).eachAfter(So(i,.5)).eachBefore(Eo(1)):t.eachBefore(No(ko)).eachAfter(So(To,1)).eachAfter(So(i,t.r/Math.min(e,r))).eachBefore(Eo(Math.min(e,r)/(2*t.r))),t}var n=null,e=1,r=1,i=To;return t.radius=function(e){return arguments.length?(n=wo(e),t):n},t.size=function(n){return arguments.length?(e=+n[0],r=+n[1],t):[e,r]},t.padding=function(n){return arguments.length?(i="function"==typeof n?n:Xv(+n),t):i},t},t.packSiblings=function(t){return bo(t),t},t.packEnclose=Hv,t.partition=function(){function t(t){var u=t.height+1;return t.x0=t.y0=i,t.x1=e,t.y1=r/u,t.eachBefore(n(r,u)),o&&t.eachBefore($v),t}function n(t,n){return function(e){e.children&&Vv(e,e.x0,t*(e.depth+1)/n,e.x1,t*(e.depth+2)/n);var r=e.x0,o=e.y0,u=e.x1-i,a=e.y1-i;u0)throw new Error("cycle");return o}var n=Ao,e=Co;return t.id=function(e){return arguments.length?(n=Mo(e),t):n},t.parentId=function(n){return arguments.length?(e=Mo(n),t):e},t},t.tree=function(){function t(t){var r=Oo(t);if(r.eachAfter(n),r.parent.m=-r.z,r.eachBefore(e),c)t.eachBefore(i);else{var s=t,f=t,l=t;t.eachBefore(function(t){t.xf.x&&(f=t),t.depth>l.depth&&(l=t)});var h=s===f?1:o(s,f)/2,p=h-s.x,d=u/(f.x+h+p),v=a/(l.depth||1);t.eachBefore(function(t){t.x=(t.x+p)*d,t.y=t.depth*v})}return t}function n(t){var n=t.children,e=t.parent.children,i=t.i?e[t.i-1]:null;if(n){qo(t);var u=(n[0].z+n[n.length-1].z)/2;i?(t.z=i.z+o(t._,i._),t.m=t.z-u):t.z=u}else 
i&&(t.z=i.z+o(t._,i._));t.parent.A=r(t,i,t.parent.A||e[0])}function e(t){t._.x=t.z+t.parent.m,t.m+=t.parent.m}function r(t,n,e){if(n){for(var r,i=t,u=t,a=n,c=i.parent.children[0],s=i.m,f=u.m,l=a.m,h=c.m;a=Ro(a),i=Po(i),a&&i;)c=Po(c),(u=Ro(u)).a=t,(r=a.z+l-i.z-s+o(a._,i._))>0&&(Lo(Uo(a,t,e),t,r),s+=r,f+=r),l+=a.m,s+=i.m,h+=c.m,f+=u.m;a&&!Ro(u)&&(u.t=a,u.m+=l-f),i&&!Po(c)&&(c.t=i,c.m+=s-h,e=t)}return e}function i(t){t.x*=u,t.y=t.depth*a}var o=zo,u=1,a=1,c=null;return t.separation=function(n){return arguments.length?(o=n,t):o},t.size=function(n){return arguments.length?(c=!1,u=+n[0],a=+n[1],t):c?null:[u,a]},t.nodeSize=function(n){return arguments.length?(c=!0,u=+n[0],a=+n[1],t):c?[u,a]:null},t},t.treemap=function(){function t(t){return t.x0=t.y0=0,t.x1=i,t.y1=o,t.eachBefore(n),u=[0],r&&t.eachBefore($v),t}function n(t){var n=u[t.depth],r=t.x0+n,i=t.y0+n,o=t.x1-n,h=t.y1-n;o=n-1){var s=c[t];return s.x0=r,s.y0=i,s.x1=u,void(s.y1=a)}for(var l=f[t],h=e/2+l,p=t+1,d=n-1;p>>1;f[v]a-i){var g=(r*y+u*_)/e;o(t,p,_,r,i,g,a),o(p,n,y,g,i,u,a)}else{var m=(i*y+a*_)/e;o(t,p,_,r,i,u,m),o(p,n,y,r,m,u,a)}}var u,a,c=t.children,s=c.length,f=new Array(s+1);for(f[0]=a=u=0;u=0;--n)s.push(t[r[o[n]][2]]);for(n=+a;na!=s>a&&u<(c-e)*(a-r)/(s-r)+e&&(f=!f),c=e,s=r;return f},t.polygonLength=function(t){for(var n,e,r=-1,i=t.length,o=t[i-1],u=o[0],a=o[1],c=0;++r1)&&(t-=Math.floor(t));var n=Math.abs(t-.5);return Ky.h=360*t-100,Ky.s=1.5-1.5*n,Ky.l=.8-.9*n,Ky+""},t.interpolateWarm=Jy,t.interpolateCool=Qy,t.interpolateViridis=tg,t.interpolateMagma=ng,t.interpolateInferno=eg,t.interpolatePlasma=rg,t.scaleSequential=Ea,t.creator=Hs,t.local=m,t.matcher=Zs,t.mouse=Ks,t.namespace=js,t.namespaces=Bs,t.select=cf,t.selectAll=function(t){return"string"==typeof t?new pt([document.querySelectorAll(t)],[document.documentElement]):new pt([null==t?[]:t],af)},t.selection=dt,t.selector=tf,t.selectorAll=nf,t.style=B,t.touch=sf,t.touches=function(t,n){null==n&&(n=Js().touches);for(var e=0,r=n?n.length:0,i=new 
Array(r);eh;if(c||(c=t=ve()),lhg)if(d>vg-hg)c.moveTo(l*ag(h),l*fg(h)),c.arc(0,0,l,h,p,!v),f>hg&&(c.moveTo(f*ag(p),f*fg(p)),c.arc(0,0,f,p,h,v));else{var _,y,g=h,m=p,x=h,b=p,w=d,M=d,T=a.apply(this,arguments)/2,k=T>hg&&(i?+i.apply(this,arguments):lg(f*f+l*l)),N=sg(og(l-f)/2,+r.apply(this,arguments)),S=N,E=N;if(k>hg){var A=Ca(k/f*fg(T)),C=Ca(k/l*fg(T));(w-=2*A)>hg?(A*=v?1:-1,x+=A,b-=A):(w=0,x=b=(h+p)/2),(M-=2*C)>hg?(C*=v?1:-1,g+=C,m-=C):(M=0,g=m=(h+p)/2)}var z=l*ag(g),P=l*fg(g),R=f*ag(b),L=f*fg(b);if(N>hg){var q=l*ag(m),U=l*fg(m),D=f*ag(x),O=f*fg(x);if(dhg?Ua(z,P,D,O,q,U,R,L):[R,L],I=z-F[0],Y=P-F[1],B=q-F[0],j=U-F[1],H=1/fg(Aa((I*B+Y*j)/(lg(I*I+Y*Y)*lg(B*B+j*j)))/2),X=lg(F[0]*F[0]+F[1]*F[1]);S=sg(N,(f-X)/(H-1)),E=sg(N,(l-X)/(H+1))}}M>hg?E>hg?(_=Da(D,O,z,P,l,E,v),y=Da(q,U,R,L,l,E,v),c.moveTo(_.cx+_.x01,_.cy+_.y01),Ehg&&w>hg?S>hg?(_=Da(R,L,q,U,f,-S,v),y=Da(z,P,D,O,f,-S,v),c.lineTo(_.cx+_.x01,_.cy+_.y01),S0&&(p+=l);for(null!=e?d.sort(function(t,n){return e(v[t],v[n])}):null!=r&&d.sort(function(n,e){return r(t[n],t[e])}),a=0,s=p?(y-h*m)/p:0;a0?l*s:0)+m,v[c]={data:t[c],index:a,value:l,startAngle:_,endAngle:f,padAngle:g};return v}var n=xg,e=mg,r=null,i=ig(0),o=ig(vg),u=ig(0);return t.value=function(e){return arguments.length?(n="function"==typeof e?e:ig(+e),t):n},t.sortValues=function(n){return arguments.length?(e=n,r=null,t):e},t.sort=function(n){return arguments.length?(r=n,e=null,t):r},t.startAngle=function(n){return arguments.length?(i="function"==typeof n?n:ig(+n),t):i},t.endAngle=function(n){return arguments.length?(o="function"==typeof n?n:ig(+n),t):o},t.padAngle=function(n){return arguments.length?(u="function"==typeof n?n:ig(+n),t):u},t},t.areaRadial=Mg,t.radialArea=Mg,t.lineRadial=wg,t.radialLine=wg,t.pointRadial=Tg,t.linkHorizontal=function(){return $a(Va)},t.linkVertical=function(){return $a(Wa)},t.linkRadial=function(){var t=$a(Za);return t.angle=t.x,delete t.x,t.radius=t.y,delete t.y,t},t.symbol=function(){function t(){var 
t;if(r||(r=t=ve()),n.apply(this,arguments).draw(r,+e.apply(this,arguments)),t)return r=null,t+""||null}var n=ig(Ng),e=ig(64),r=null;return t.type=function(e){return arguments.length?(n="function"==typeof e?e:ig(e),t):n},t.size=function(n){return arguments.length?(e="function"==typeof n?n:ig(+n),t):e},t.context=function(n){return arguments.length?(r=null==n?null:n,t):r},t},t.symbols=jg,t.symbolCircle=Ng,t.symbolCross=Sg,t.symbolDiamond=Cg,t.symbolSquare=qg,t.symbolStar=Lg,t.symbolTriangle=Dg,t.symbolWye=Bg,t.curveBasisClosed=function(t){return new Qa(t)},t.curveBasisOpen=function(t){return new Ka(t)},t.curveBasis=function(t){return new Ja(t)},t.curveBundle=Xg,t.curveCardinalClosed=Vg,t.curveCardinalOpen=Wg,t.curveCardinal=$g,t.curveCatmullRomClosed=Gg,t.curveCatmullRomOpen=Jg,t.curveCatmullRom=Zg,t.curveLinearClosed=function(t){return new sc(t)},t.curveLinear=_g,t.curveMonotoneX=function(t){return new dc(t)},t.curveMonotoneY=function(t){return new vc(t)},t.curveNatural=function(t){return new yc(t)},t.curveStep=function(t){return new mc(t,.5)},t.curveStepAfter=function(t){return new mc(t,1)},t.curveStepBefore=function(t){return new mc(t,0)},t.stack=function(){function t(t){var o,u,a=n.apply(this,arguments),c=t.length,s=a.length,f=new Array(s);for(o=0;o0){for(var e,r,i,o=0,u=t[0].length;o1)for(var e,r,i,o,u,a,c=0,s=t[n[0]].length;c=0?(r[0]=o,r[1]=o+=i):i<0?(r[1]=u,r[0]=u+=i):r[0]=o},t.stackOffsetNone=Qg,t.stackOffsetSilhouette=function(t,n){if((e=t.length)>0){for(var e,r=0,i=t[n[0]],o=i.length;r0&&(r=(e=t[n[0]]).length)>0){for(var e,r,i,o=0,u=1;uUl&&e.name===n)return new Gn([[t]],yh,n,+r)}return null},t.interrupt=jl,t.voronoi=function(){function t(t){return new Kc(t.map(function(r,i){var o=[Math.round(n(r,i,t)/sm)*sm,Math.round(e(r,i,t)/sm)*sm];return o.index=i,o.data=r,o}),r)}var n=wc,e=Mc,r=null;return t.polygons=function(n){return t(n).polygons()},t.links=function(n){return t(n).links()},t.triangles=function(n){return t(n).triangles()},t.x=function(e){return 
arguments.length?(n="function"==typeof e?e:nm(+e),t):n},t.y=function(n){return arguments.length?(e="function"==typeof n?n:nm(+n),t):e},t.extent=function(n){return arguments.length?(r=null==n?null:[[+n[0][0],+n[0][1]],[+n[1][0],+n[1][1]]],t):r&&[[r[0][0],r[0][1]],[r[1][0],r[1][1]]]},t.size=function(n){return arguments.length?(r=null==n?null:[[0,0],[+n[0],+n[1]]],t):r&&[r[1][0]-r[0][0],r[1][1]-r[0][1]]},t},t.zoom=function(){function n(t){t.property("__zoom",us).on("wheel.zoom",s).on("mousedown.zoom",f).on("dblclick.zoom",l).filter(cs).on("touchstart.zoom",p).on("touchmove.zoom",d).on("touchend.zoom touchcancel.zoom",v).style("touch-action","none").style("-webkit-tap-highlight-color","rgba(0,0,0,0)")}function e(t,n){return(n=Math.max(b,Math.min(w,n)))===t.k?t:new ns(n,t.x,t.y)}function r(t,n,e){var r=n[0]-e[0]*t.k,i=n[1]-e[1]*t.k;return r===t.x&&i===t.y?t:new ns(t.k,r,i)}function i(t,n){var e=t.invertX(n[0][0])-M,r=t.invertX(n[1][0])-T,i=t.invertY(n[0][1])-k,o=t.invertY(n[1][1])-S;return t.translate(r>e?(e+r)/2:Math.min(0,e)||Math.max(0,r),o>i?(i+o)/2:Math.min(0,i)||Math.max(0,o))}function o(t){return[(+t[0][0]+ +t[1][0])/2,(+t[0][1]+ +t[1][1])/2]}function u(t,n,e){t.on("start.zoom",function(){a(this,arguments).start()}).on("interrupt.zoom end.zoom",function(){a(this,arguments).end()}).tween("zoom",function(){var t=this,r=arguments,i=a(t,r),u=m.apply(t,r),c=e||o(u),s=Math.max(u[1][0]-u[0][0],u[1][1]-u[0][1]),f=t.__zoom,l="function"==typeof n?n.apply(t,r):n,h=A(f.invert(c).concat(s/f.k),l.invert(c).concat(s/l.k));return function(t){if(1===t)t=l;else{var n=h(t),e=s/n[2];t=new ns(e,c[0]-n[0]*e,c[1]-n[1]*e)}i.zoom(null,t)}})}function a(t,n){for(var e,r=0,i=C.length;rL}n.zoom("mouse",i(r(n.that.__zoom,n.mouse[0]=Ks(n.that),n.mouse[1]),n.extent))},!0).on("mouseup.zoom",function(){e.on("mousemove.zoom 
mouseup.zoom",null),_t(t.event.view,n.moved),pm(),n.end()},!0),o=Ks(this),u=t.event.clientX,c=t.event.clientY;lf(t.event.view),rs(),n.mouse=[o,this.__zoom.invert(o)],jl(this),n.start()}}function l(){if(g.apply(this,arguments)){var o=this.__zoom,a=Ks(this),c=o.invert(a),s=i(r(e(o,o.k*(t.event.shiftKey?.5:2)),a,c),m.apply(this,arguments));pm(),E>0?cf(this).transition().duration(E).call(u,s,a):cf(this).call(n.transform,s)}}function p(){if(g.apply(this,arguments)){var n,e,r,i,o=a(this,arguments),u=t.event.changedTouches,c=u.length;for(rs(),e=0;eselection*/ +/*d3v5 extent=>selection*/ rect.selection { stroke: red; diff --git a/tvb/interfaces/web/templates/genshi/visualizers/connectivity_edge_bundle/view.html b/tvb/interfaces/web/templates/genshi/visualizers/connectivity_edge_bundle/view.html index c00f890be..0e75f0d54 100644 --- a/tvb/interfaces/web/templates/genshi/visualizers/connectivity_edge_bundle/view.html +++ b/tvb/interfaces/web/templates/genshi/visualizers/connectivity_edge_bundle/view.html @@ -1,6 +1,6 @@
    - + +