Skip to content

Commit

Permalink
No-credential data download (#1180)
Browse files (browse the repository at this point in the history)
* No-credential data download

* Update Changelog

* Fix typo

* Run tests on each commit for labeled PR

* Remove runif conditional on label

* logger.warn -> warning

* 🐛 : debug 1

* 🐛 : debug 2

* 🐛 : debug 3

* 🐛 : debug 4
  • Loading branch information
CBroz1 authored Nov 12, 2024
1 parent fa1114e commit 239f2a5
Show file tree
Hide file tree
Showing 30 changed files with 109 additions and 136 deletions.
30 changes: 15 additions & 15 deletions .github/workflows/test-conda.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@ on:
- '!documentation'
schedule: # once a day at midnight UTC
- cron: '0 0 * * *'
pull_request: # requires approval for first-time contributors
types: [synchronize, opened, reopened, labeled]
workflow_dispatch: # Manually trigger with 'Run workflow' button

concurrency: # Replace Cancel Workflow Action
Expand All @@ -22,8 +24,6 @@ jobs:
env:
OS: ubuntu-latest
PYTHON: '3.9'
UCSF_BOX_TOKEN: ${{ secrets.UCSF_BOX_TOKEN }} # for download and testing
UCSF_BOX_USER: ${{ secrets.UCSF_BOX_USER }}
services:
mysql:
image: datajoint/mysql:8.0
Expand Down Expand Up @@ -57,23 +57,23 @@ jobs:
pip install --quiet .[test]
- name: Download data
env:
BASEURL: ftps://ftp.box.com/trodes_to_nwb_test_data/
NWBFILE: minirec20230622.nwb # Relative to Base URL
VID_ONE: 20230622_sample_01_a1/20230622_sample_01_a1.1.h264
VID_TWO: 20230622_sample_02_a1/20230622_sample_02_a1.1.h264
BASEURL: https://ucsf.box.com/shared/static/
NWB_URL: k3sgql6z475oia848q1rgms4zdh4rkjn.nwb
VID1URL: ykep8ek4ogad20wz4p0vuyuqfo60cv3w.h264
VID2URL: d2jjk0y565ru75xqojio3hymmehzr5he.h264
NWBFILE: minirec20230622.nwb
VID_ONE: 20230622_minirec_01_s1.1.h264
VID_TWO: 20230622_minirec_02_s2.1.h264
RAW_DIR: /home/runner/work/spyglass/spyglass/tests/_data/raw/
VID_DIR: /home/runner/work/spyglass/spyglass/tests/_data/video/
run: |
mkdir -p $RAW_DIR $VID_DIR
wget_opts() { # Declare func with download options
wget \
--recursive --no-verbose --no-host-directories --no-directories \
--user "$UCSF_BOX_USER" --password "$UCSF_BOX_TOKEN" \
-P "$1" "$BASEURL""$2"
curl_opts() { # Declare func with download options
curl -L --output "$1""$2" "$BASEURL""$3"
}
wget_opts $RAW_DIR $NWBFILE
wget_opts $VID_DIR $VID_ONE
wget_opts $VID_DIR $VID_TWO
curl_opts $RAW_DIR $NWBFILE $NWB_URL
curl_opts $VID_DIR $VID_ONE $VID1URL
curl_opts $VID_DIR $VID_TWO $VID2URL
- name: Run tests
run: |
pytest --no-docker --no-dlc
pytest --no-docker --no-dlc tests/
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ dj.FreeTable(dj.conn(), "common_session.session_group").drop()
- Remove debug statement #1164
- Add testing for python versions 3.9, 3.10, 3.11, 3.12 #1169
- Initialize tables in pytests #1181
- Download test data without credentials, trigger on approved PRs #1180
- Allow python \< 3.13 #1169
- Remove numpy version restriction #1169
- Merge table delete removes orphaned master entries #1164
Expand Down
2 changes: 1 addition & 1 deletion environment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
name: spyglass
channels:
- conda-forge
- defaults
# - defaults # deprecated
- franklab
- edeno
# - pytorch # dlc-only
Expand Down
2 changes: 1 addition & 1 deletion environment_dlc.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
name: spyglass-dlc
channels:
- conda-forge
- defaults
# - defaults # deprecated
- franklab
- edeno
- pytorch # dlc-only
Expand Down
6 changes: 4 additions & 2 deletions src/spyglass/common/common_behav.py
Original file line number Diff line number Diff line change
Expand Up @@ -197,7 +197,7 @@ def fetch1_dataframe(self):
id_rp = [(n["id"], n["raw_position"]) for n in self.fetch_nwb()]

if len(set(rp.interval for _, rp in id_rp)) > 1:
logger.warn("Loading DataFrame with multiple intervals.")
logger.warning("Loading DataFrame with multiple intervals.")

df_list = [
pd.DataFrame(
Expand Down Expand Up @@ -395,7 +395,9 @@ def _no_transaction_make(self, key, verbose=True, skip_duplicates=False):
)

if videos is None:
logger.warn(f"No video data interface found in {nwb_file_name}\n")
logger.warning(
f"No video data interface found in {nwb_file_name}\n"
)
return
else:
videos = videos.time_series
Expand Down
12 changes: 7 additions & 5 deletions src/spyglass/common/common_device.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,9 @@ def insert_from_nwbfile(cls, nwbf, config=None):
+ f"{ndx_devices.keys()}"
)
else:
logger.warn("No conforming data acquisition device metadata found.")
logger.warning(
"No conforming data acquisition device metadata found."
)

@classmethod
def get_all_device_names(cls, nwbf, config) -> tuple:
Expand Down Expand Up @@ -305,7 +307,7 @@ def insert_from_nwbfile(cls, nwbf, config=None):
if device_name_list:
logger.info(f"Inserted camera devices {device_name_list}")
else:
logger.warn("No conforming camera device metadata found.")
logger.warning("No conforming camera device metadata found.")
return device_name_list


Expand Down Expand Up @@ -462,7 +464,7 @@ def insert_from_nwbfile(cls, nwbf, config=None):
if all_probes_types:
logger.info(f"Inserted probes {all_probes_types}")
else:
logger.warn("No conforming probe metadata found.")
logger.warning("No conforming probe metadata found.")

return all_probes_types

Expand Down Expand Up @@ -709,7 +711,7 @@ def create_from_nwbfile(

query = ProbeType & {"probe_type": probe_type}
if len(query) == 0:
logger.warn(
logger.warning(
f"No ProbeType found with probe_type '{probe_type}'. Aborting."
)
return
Expand Down Expand Up @@ -769,7 +771,7 @@ def create_from_nwbfile(
]

if not device_found:
logger.warn(
logger.warning(
"No electrodes in the NWB file were associated with a device "
+ f"named '{nwb_device_name}'."
)
Expand Down
2 changes: 1 addition & 1 deletion src/spyglass/common/common_dio.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ def make(self, key):
nwbf, "behavioral_events", pynwb.behavior.BehavioralEvents
)
if behav_events is None:
logger.warn(
logger.warning(
"No conforming behavioral events data interface found in "
+ f"{nwb_file_name}\n"
)
Expand Down
4 changes: 2 additions & 2 deletions src/spyglass/common/common_session.py
Original file line number Diff line number Diff line change
Expand Up @@ -152,7 +152,7 @@ def _add_data_acquisition_device_part(self, nwb_file_name, nwbf, config={}):
"data_acquisition_device_name": device_name
}
if len(query) == 0:
logger.warn(
logger.warning(
"Cannot link Session with DataAcquisitionDevice.\n"
+ f"DataAcquisitionDevice does not exist: {device_name}"
)
Expand Down Expand Up @@ -180,7 +180,7 @@ def _add_experimenter_part(
# ensure that the foreign key exists and do nothing if not
query = LabMember & {"lab_member_name": name}
if len(query) == 0:
logger.warn(
logger.warning(
"Cannot link Session with LabMember. "
+ f"LabMember does not exist: {name}"
)
Expand Down
2 changes: 1 addition & 1 deletion src/spyglass/common/common_subject.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ def insert_from_nwbfile(cls, nwbf: NWBFile, config: dict = None):
"""
config = config or dict()
if "Subject" not in config and nwbf.subject is None:
logger.warn("No subject metadata found.\n")
logger.warning("No subject metadata found.\n")
return None

conf = config["Subject"][0] if "Subject" in config else dict()
Expand Down
10 changes: 5 additions & 5 deletions src/spyglass/common/common_task.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ def insert_from_nwbfile(cls, nwbf: pynwb.NWBFile):
"""
tasks_mod = nwbf.processing.get("tasks")
if tasks_mod is None:
logger.warn(f"No tasks processing module found in {nwbf}\n")
logger.warning(f"No tasks processing module found in {nwbf}\n")
return
for task in tasks_mod.data_interfaces.values():
if cls.check_task_table(task):
Expand Down Expand Up @@ -136,7 +136,7 @@ def make(self, key):
tasks_mod = nwbf.processing.get("tasks")
config_tasks = config.get("Tasks", [])
if tasks_mod is None and (not config_tasks):
logger.warn(
logger.warning(
f"No tasks processing module found in {nwbf} or config\n"
)
return
Expand All @@ -163,7 +163,7 @@ def make(self, key):
for camera_id in valid_camera_ids
]
else:
logger.warn(
logger.warning(
f"No camera device found with ID {camera_ids} in NWB "
+ f"file {nwbf}\n"
)
Expand All @@ -186,7 +186,7 @@ def make(self, key):
epoch, session_intervals
)
if target_interval is None:
logger.warn("Skipping epoch.")
logger.warning("Skipping epoch.")
continue
key["interval_list_name"] = target_interval
task_inserts.append(key.copy())
Expand Down Expand Up @@ -219,7 +219,7 @@ def make(self, key):
epoch, session_intervals
)
if target_interval is None:
logger.warn("Skipping epoch.")
logger.warning("Skipping epoch.")
continue
new_key["interval_list_name"] = target_interval
task_inserts.append(key.copy())
Expand Down
2 changes: 1 addition & 1 deletion src/spyglass/common/prepopulate/prepopulate.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ def populate_from_yaml(yaml_path: str):
if k in table_cls.primary_key
}
if not primary_key_values:
logger.warn(
logger.warning(
f"Populate: No primary key provided in data {entry_dict} "
+ f"for table {table_cls.__name__}"
)
Expand Down
26 changes: 13 additions & 13 deletions src/spyglass/position/v1/dlc_utils_makevid.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
import subprocess
from concurrent.futures import ProcessPoolExecutor, TimeoutError, as_completed
from pathlib import Path
from typing import Tuple

import matplotlib
import matplotlib.pyplot as plt
Expand Down Expand Up @@ -190,10 +189,6 @@ def _set_frame_info(self):

self.pad_len = len(str(self.n_frames))

def _set_input_stats(self, video_filename=None) -> Tuple[int, int]:
"""Get the width and height of the video."""
logger.debug("Getting video stats with ffprobe")

def _set_plot_bases(self):
"""Create the figure and axes for the video."""
logger.debug("Setting plot bases")
Expand Down Expand Up @@ -309,15 +304,20 @@ def centroid_to_px(*idx):
)
)

def _set_orient_line(self, frame, pos_ind):
def orient_list(c):
return [c, c + 30 * np.cos(self.orientation_mean[pos_ind])]
def _get_orient_line(self, pos_ind):
orient = self.orientation_mean[pos_ind]
if isinstance(orient, np.ndarray):
orient = orient[0] # Trodes passes orientation as a 1D array

if np.all(np.isnan(self.orientation_mean[pos_ind])):
self.orientation_line.set_data((np.NaN, np.NaN))
def orient_list(c, axis="x"):
func = np.cos if axis == "x" else np.sin
return [c, c + 30 * func(orient)]

if np.all(np.isnan(orient)):
return ([np.NaN], [np.NaN])
else:
c0, c1 = self._get_centroid_data(pos_ind)
self.orientation_line.set_data(orient_list(c0), orient_list(c1))
x, y = self._get_centroid_data(pos_ind)
return (orient_list(x), orient_list(y, axis="y"))

def _generate_single_frame(self, frame_ind):
"""Generate a single frame and save it as an image."""
Expand Down Expand Up @@ -364,7 +364,7 @@ def _generate_single_frame(self, frame_ind):
)
)
self.centroid_position_dot.set_offsets(dlc_centroid_data)
_ = self._set_orient_line(frame, pos_ind)
self.orientation_line.set_data(self._get_orient_line(pos_ind))

time_delta = pd.Timedelta(
pd.to_datetime(self.position_time[pos_ind] * 1e9, unit="ns")
Expand Down
9 changes: 7 additions & 2 deletions src/spyglass/position/v1/position_trodes_position.py
Original file line number Diff line number Diff line change
Expand Up @@ -344,9 +344,14 @@ def make(self, key):
adj_df = _fix_col_names(raw_df) # adjust 'xloc1' to 'xloc'

limit = params.get("limit", None)
if limit or test_mode:

if limit and not test_mode:
params["debug"] = True
output_video_filename = Path(".") / f"TEST_VID_{limit}.mp4"
elif test_mode:
limit = 10

if limit:
# pytest video data has mismatched shapes in some cases
min_len = limit or min(len(adj_df), len(pos_df), len(video_time))
adj_df = adj_df.head(min_len)
Expand Down Expand Up @@ -401,7 +406,7 @@ def make(self, key):
**params,
)

if limit:
if limit and not test_mode:
return vid_maker

self.insert1(dict(**key, has_video=True))
4 changes: 2 additions & 2 deletions src/spyglass/spikesorting/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -108,7 +108,7 @@ def get_group_by_shank(
if omit_ref_electrode_group and (
str(e_group) == str(ref_elec_group)
):
logger.warn(
logger.warning(
f"Omitting electrode group {e_group} from sort groups "
+ "because contains reference."
)
Expand All @@ -117,7 +117,7 @@ def get_group_by_shank(

# omit unitrodes if indicated
if omit_unitrode and len(shank_elect) == 1:
logger.warn(
logger.warning(
f"Omitting electrode group {e_group}, shank {shank} "
+ "from sort groups because unitrode."
)
Expand Down
2 changes: 1 addition & 1 deletion src/spyglass/spikesorting/v0/sortingview.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ def make(self, key: dict):
LabMember.LabMemberInfo & {"lab_member_name": team_member}
).fetch("google_user_name")
if len(google_user_id) != 1:
logger.warn(
logger.warning(
f"Google user ID for {team_member} does not exist or more than one ID detected;\
permission not given to {team_member}, skipping..."
)
Expand Down
2 changes: 1 addition & 1 deletion src/spyglass/spikesorting/v0/sortingview_helper_fn.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,7 @@ def _generate_url(
)

if initial_curation is not None:
logger.warn("found initial curation")
logger.warning("found initial curation")
sorting_curation_uri = kcl.store_json(initial_curation)
else:
sorting_curation_uri = None
Expand Down
2 changes: 1 addition & 1 deletion src/spyglass/spikesorting/v0/spikesorting_artifact.py
Original file line number Diff line number Diff line change
Expand Up @@ -263,7 +263,7 @@ def _get_artifact_times(
[[valid_timestamps[0], valid_timestamps[-1]]]
)
artifact_times_empty = np.asarray([])
logger.warn("No artifacts detected.")
logger.warning("No artifacts detected.")
return recording_interval, artifact_times_empty

# convert indices to intervals
Expand Down
2 changes: 1 addition & 1 deletion src/spyglass/spikesorting/v0/spikesorting_curation.py
Original file line number Diff line number Diff line change
Expand Up @@ -266,7 +266,7 @@ def save_sorting_nwb(
AnalysisNwbfile().add(key["nwb_file_name"], analysis_file_name)

if object_ids == "":
logger.warn(
logger.warning(
"Sorting contains no units."
"Created an empty analysis nwb file anyway."
)
Expand Down
4 changes: 2 additions & 2 deletions src/spyglass/spikesorting/v1/artifact.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ def insert_selection(cls, key: dict):
"""
query = cls & key
if query:
logger.warn("Similar row(s) already inserted.")
logger.warning("Similar row(s) already inserted.")
return query.fetch(as_dict=True)
key["artifact_id"] = uuid.uuid4()
cls.insert1(key, skip_duplicates=True)
Expand Down Expand Up @@ -290,7 +290,7 @@ def _get_artifact_times(
[[valid_timestamps[0], valid_timestamps[-1]]]
)
artifact_times_empty = np.asarray([])
logger.warn("No artifacts detected.")
logger.warning("No artifacts detected.")
return recording_interval, artifact_times_empty

# convert indices to intervals
Expand Down
2 changes: 1 addition & 1 deletion src/spyglass/spikesorting/v1/figurl_curation.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ def insert_selection(cls, key: dict):
if "figurl_curation_id" in key:
query = cls & {"figurl_curation_id": key["figurl_curation_id"]}
if query:
logger.warn("Similar row(s) already inserted.")
logger.warning("Similar row(s) already inserted.")
return query.fetch(as_dict=True)
key["figurl_curation_id"] = uuid.uuid4()
cls.insert1(key, skip_duplicates=True)
Expand Down
Loading

0 comments on commit 239f2a5

Please sign in to comment.