Skip to content
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
55 changes: 31 additions & 24 deletions scripts/upload/add_snow_depths.py
Original file line number Diff line number Diff line change
@@ -1,36 +1,43 @@
"""
Uploads the Snowex 2020 depths to the database

1. Data must be downloaded via sh ../download/download_nsidc.sh
2A. python run.py # To run all together all at once
2B. python add_snow_depths.py # To run individually
Script to uploads the Snowex 2020 depths to the database\
"""

import glob
from os.path import abspath, join

from snowex_db.upload.points import PointDataCSV
from earthaccess_data import get_files
from import_logger import get_logger
from snowexsql.db import db_session_with_credentials
from snowex_db.upload.points import PointDataCSV
LOG = get_logger()

# Maps NSIDC dataset short name -> DOI of the product to fetch and upload.
SNOWEX_DEPTHS_MAP = {
"SNEX20_SD": "10.5067/9IA978JIACAR"
}

def main(file_list: list, doi: str) -> None:
    """Upload a batch of SnowEx depth CSV files to the database.

    Args:
        file_list: Candidate data file paths (str or path-like); only
            entries ending in ``.csv`` (case-insensitive) are uploaded.
        doi: DOI of the source dataset, recorded with every uploaded point.
    """
    # NOTE: the count logged here is pre-filtering, so it may exceed the
    # number of CSVs actually uploaded.
    LOG.info(f"Uploading DOI: {doi} with {len(file_list)} files.")

    # Metadata applied to every uploaded point record.
    kwargs = {
        "site_name": "Grand Mesa",
        "campaign_name": "Grand Mesa",
        "timezone": "US/Mountain",
        "doi": doi,
    }

    # Keep only CSV files — presumably the downloaded granules can include
    # non-CSV artifacts (TODO confirm against earthaccess_data.get_files).
    file_list = [
        file
        for file in file_list
        if str(file).lower().endswith(".csv")
    ]

    with db_session_with_credentials() as (_engine, session):
        for file in file_list:
            uploader = PointDataCSV(session,
                                    file,
                                    **kwargs)
            uploader.submit()


if __name__ == '__main__':
    # For each configured SnowEx product: fetch its files from NSIDC,
    # then upload them under the product's DOI.
    for data_set_id, doi in SNOWEX_DEPTHS_MAP.items():
        with get_files(data_set_id, doi) as files:
            main(files, doi)