Merged
Changes from all commits
27 commits
e857753  Working parsing of BSU GPR data (micah-prime, Jul 1, 2025)
1da6bbf  Issue #61 - script for bsu gpr (micah-prime, Jul 1, 2025)
0c3e2d3  Issue #60 - working on bulk properties scripts (micah-prime, Jul 1, 2025)
bc07631  Issue #60 - Working on bulk property script (micah-prime, Jul 2, 2025)
72b1f7a  Finish testing pit summary files (micah-prime, Jul 2, 2025)
3557906  Upload script (micah-prime, Jul 2, 2025)
50e72ac  Update to new file reading structure in insitupy (#73) (micah-prime, Jul 15, 2025)
e4efd8d  dealing with stash (micah-prime, Jul 15, 2025)
1ef8e5e  merging in latest work to this branch (micah-prime, Jul 15, 2025)
0e486ba  use insitupy parse noneg (micah-prime, Jul 15, 2025)
1b91416  working on tests (micah-prime, Jul 15, 2025)
58e4692  value is float in points (micah-prime, Jul 15, 2025)
6686a81  fix layer data tests (micah-prime, Jul 15, 2025)
d98c020  take advantage of github actions while we're coding (micah-prime, Jul 15, 2025)
d1bd7b7  Upload - Simplify upload batch check for empty dataframe (jomey, Jul 24, 2025)
2792beb  Code QC - Update a few doc strings and method return signatures. (jomey, Jul 24, 2025)
446f3bf  Code QC - Organize imports and remove unused variable. (jomey, Jul 24, 2025)
cb7cd93  ProfileData - Remove overwrite of super init method. (jomey, Jul 24, 2025)
243eb11  Merge branch 'api_upload_update' into issue_60_61 (jomey, Jul 24, 2025)
1026cd7  Tests - Skip metadata suite. To be removed soon (jomey, Jul 25, 2025)
da246f4  Tests - Rename file from poll to pole depth. (jomey, Jul 25, 2025)
8898474  PointMetadata - Change inheritance to SnowExMetaDataParser (jomey, Jul 25, 2025)
976c7b1  Upload - Points - Compact handling of instrument, measurement, and type (jomey, Jul 25, 2025)
0bce765  Upload - Points - Change dict access to use get() (jomey, Jul 25, 2025)
859540f  Upload - Points - Use PitID instead of Name mapping. (jomey, Jul 25, 2025)
1ecc539  Upload - Points - Code and comment formatting (jomey, Jul 25, 2025)
f1ac482  Tests - Points - Expand cases to look for more records and attributes (jomey, Jul 25, 2025)
.github/workflows/main.yml (8 changes: 5 additions & 3 deletions)

@@ -7,15 +7,17 @@ on:
   push:
     branches: [ main ]
   pull_request:
-    branches: [ main ]
+    branches:
+      - main
+      - api_upload_update
   workflow_dispatch:
 
 jobs:
   build:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: [3.8, 3.9, "3.10"]
+        python-version: [3.9, '3.10', 3.11, 3.12]
 
     services:
@@ -53,7 +55,7 @@ jobs:
         pytest -s tests/
 
       # Run coverage only once
-      - if: ${{ matrix.python-version == '3.9'}}
+      - if: ${{ matrix.python-version == '3.10'}}
         name: Get Coverage for badge
         run: |
           # Run coverage save the results
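One detail worth noting in the matrix change: '3.10' must stay quoted because YAML parses a bare 3.10 as the float 3.1, and the coverage gate above compares against the string '3.10'. A quick illustration with PyYAML, which is not part of this PR and is used here only to show the parsing behavior:

    import yaml  # PyYAML, used only to illustrate YAML scalar parsing

    # A bare 3.10 is a YAML float and silently becomes 3.1,
    # while the quoted form survives as the string '3.10'.
    print(yaml.safe_load("python-version: [3.9, 3.10]"))
    # {'python-version': [3.9, 3.1]}
    print(yaml.safe_load("python-version: [3.9, '3.10']"))
    # {'python-version': [3.9, '3.10']}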
pyproject.toml (2 changes: 1 addition & 1 deletion)

@@ -21,7 +21,7 @@ classifiers = [
 dependencies = [
     "snowexsql==0.6.0rc1",
     "snowmicropyn",
-    "insitupy~=0.3.0",
+    "insitupy~=0.4",
     "boto3<1.24",
     "rasterio<1.4",
     "timezonefinder<7",
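The pin is loosened as well as bumped: under compatible-release semantics, insitupy~=0.3.0 only allowed 0.3.x patch releases, while insitupy~=0.4 accepts any 0.x release from 0.4 onward. A small sketch using the packaging library (an assumption here, not a dependency of this project) to check the two specifiers:

    from packaging.specifiers import SpecifierSet

    # ~=0.3.0 expands to >=0.3.0, ==0.3.*; ~=0.4 expands to >=0.4, ==0.*
    old_pin = SpecifierSet("~=0.3.0")
    new_pin = SpecifierSet("~=0.4")
    print(old_pin.contains("0.4.1"))  # False: patch-level pin stops at 0.3.x
    print(new_pin.contains("0.4.1"))  # True
    print(new_pin.contains("0.9.0"))  # True: any 0.x release >= 0.4
    print(new_pin.contains("1.0.0"))  # False: major version is still capped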
scripts/upload/add_bsu_gpr.py (48 changes: 19 additions & 29 deletions)

@@ -8,49 +8,39 @@
 
 """
 
-import time
-from os.path import abspath, expanduser, join
+from os.path import abspath, expanduser
 
 import pandas as pd
 
-from snowexsql.db import get_db
-from snowex_db.upload import *
+from snowexsql.db import db_session_with_credentials
+from snowex_db.upload.points import PointDataCSV
 
 
 def main():
-    file = '../download/data/SNOWEX/SNEX20_BSU_GPR.001/2020.01.28/SNEX20_BSU_GPR_pE_01282020_01292020_02042020.csv'
+    file = ('../download/data/SNOWEX/SNEX20_BSU_GPR.001/'
+            '2020.01.28/SNEX20_BSU_GPR_pE_01282020_01292020_02042020.csv')
 
     kwargs = {
-        # Keyword argument to upload depth measurements
-        'depth_is_metadata': False,
-
         # Constant Metadata for the GPR data
-        'site_name': 'Grand Mesa',
-        'observers': 'Tate Meehan',
-        'instrument': 'pulse EKKO Pro multi-polarization 1 GHz GPR',
-        'in_timezone': 'UTC',
-        'out_timezone': 'UTC',
-        'epsg': 26912,
-        'doi': 'https://doi.org/10.5067/Q2LFK0QSVGS2'
+        'campaign_name': 'Grand Mesa',
+        'observer': 'Tate Meehan',
+        'instrument': 'gpr',
+        'instrument_model': 'pulse EKKO Pro multi-polarization 1 GHz GPR',
+        'timezone': 'UTC',
+        'doi': 'https://doi.org/10.5067/Q2LFK0QSVGS2',
+        'name': 'BSU GPR Data',
     }
 
     # Break out the path and make it an absolute path
     file = abspath(expanduser(file))
 
-    # Grab a db connection to a local db named snowex
-    db_name = 'localhost/snowex'
-    engine, session = get_db(db_name, credentials='./credentials.json')
-
-    # Instantiate the point uploader
-    csv = PointDataCSV(file, **kwargs)
-    # Push it to the database
-    csv.submit(session)
-
-    # Close out the session with the DB
-    session.close()
-
+    # Grab a db connection
+    with db_session_with_credentials() as (_engine, session):
+        # Instantiate the point uploader
+        csv = PointDataCSV(file, **kwargs)
+        # Push it to the database
+        csv.submit(session)
 
-    # return the number of errors for run.py can report it
-    return len(csv.errors)
+    # return len(csv.errors)
 
 
 if __name__ == '__main__':
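The core of this rewrite is swapping the manual get_db()/session.close() pair for a context manager, so the session is released even when submit() raises. A minimal sketch of the resulting pattern, assuming db_session_with_credentials() reads connection settings from a default credentials file as the diff implies (upload_points is an illustrative name, not part of the PR):

    from snowexsql.db import db_session_with_credentials
    from snowex_db.upload.points import PointDataCSV

    def upload_points(path, **metadata):
        # Entering the context opens the engine and session; leaving it
        # closes them, even if submit() raises partway through.
        with db_session_with_credentials() as (_engine, session):
            csv = PointDataCSV(path, **metadata)
            csv.submit(session)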
scripts/upload/add_pits_bulk_properties.py (27 changes: 14 additions & 13 deletions)

@@ -9,16 +9,14 @@
 
 import pandas as pd
 
-from snowex_db.upload import PointDataCSV
-from snowex_db import db_session
+from snowexsql.db import db_session_with_credentials
+from snowex_db.upload.points import PointDataCSV
 
 
 def main():
     """
     Add bulk SWE, Depth, Density for 2020 and 2021 timeseires pits
     """
-    db_name = 'localhost/snowex'
-    debug = True
 
     # Point to the downloaded data from
     data_dir = abspath('../download/data/SNOWEX/')
@@ -35,14 +33,18 @@ def main():
         },
         # Preliminary data from 2023 Alask pits
         {
+            # TODO: update this
             "DOI": "preliminary_alaska_pits",
             "path": "../SNEX23_preliminary/Data/SnowEx23_SnowPits_AKIOP_Summary_SWE_v01.csv"
         }
     ]
     # start a db session
+    # look through the pit summary files
     for info in path_details:
         doi = info["DOI"]
         file_path = join(data_dir, info["path"])
+        # Read csv and dump new one without the extra header lines
+        # that make parsing not possible
         df = pd.read_csv(
             file_path,
             skiprows=list(range(32)) + [33]
@@ -61,16 +63,15 @@ def main():
         df.to_csv(new_name, index=False)
 
         # Submit SWE file data as point data
-        with db_session(
-            db_name, credentials='credentials.json'
-        ) as (session, engine):
-            pcsv = PointDataCSV(
-                new_name, doi=doi, debug=debug,
-                depth_is_metadata=False,
-                row_based_crs=True,
-                row_based_timezone=True
+        with db_session_with_credentials() as (_engine, session):
+            u = PointDataCSV(
+                new_name,
+                doi=doi,
+                row_based_timezone=True,
+                derived=True
             )
-            pcsv.submit(session)
-
+            u.submit(session)
 
 
 if __name__ == '__main__':
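The skiprows=list(range(32)) + [33] call is what makes these summary files parseable: it drops the 32 banner lines above the column header plus the line immediately after the header (likely a units row), keeping file line 32 as the header. A scaled-down sketch of the same pattern, with a toy file whose 3 banner lines stand in for the real 32:

    import io
    import pandas as pd

    # Toy stand-in for a pit summary file: 3 banner lines, a header row,
    # a units row, then data. The real files carry 32 banner lines.
    text = "\n".join(
        ["# banner"] * 3
        + ["Depth,SWE", "cm,mm", "120,300", "95,210"]
    )

    # Skip the banner (rows 0-2) and the units row (row 4), keeping
    # row 3 as the header; the script does the same with
    # skiprows=list(range(32)) + [33].
    df = pd.read_csv(io.StringIO(text), skiprows=list(range(3)) + [4])
    print(df)
    #    Depth  SWE
    # 0    120  300
    # 1     95  210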
snowex_db/metadata.py (21 changes: 15 additions & 6 deletions)

@@ -3,15 +3,20 @@
 to describing data.
 """
 import logging
+import pandas as pd
+import pytz
 
 from dataclasses import dataclass
-from typing import Union
+from typing import Tuple, Union
 
-from insitupy.io.metadata import MetaDataParser
 from insitupy.profiles.metadata import ProfileMetaData
+from insitupy.campaigns.snowex.snowex_metadata import SnowExMetaDataParser
 from snowexsql.db import get_table_attributes
 from snowexsql.tables import Site
 
-from .interpretation import *
+from .interpretation import (
+    manage_degree_values, convert_cardinal_to_degree, add_date_time_keys
+)
 from .projection import add_geom, reproject_point_in_dict
 from .string_management import *
 from .utilities import assign_default_kwargs, get_logger
@@ -119,25 +124,29 @@ class SnowExProfileMetadata(ProfileMetaData):
     wind: Union[str, None] = None
 
 
-class ExtendedSnowExMetadataParser(MetaDataParser):
+class ExtendedSnowExMetadataParser(SnowExMetaDataParser):
     """
     Extend the parser to update the parsing function
     """
 
-    def parse(self):
+    def parse(self, filename: str) \
+            -> Tuple[SnowExProfileMetadata, list, dict, int]:
         """
         Parse the file and return a metadata object.
         We can override these methods as needed to parse the different
         metadata
 
         This populates self.rough_obj
 
+        Args:
+            filename: Path to the file from which to parse metadata
+
         Returns:
             (metadata object, column list, position of header in file)
         """
         (
             meta_lines, columns, columns_map, header_position
-        ) = self.find_header_info(self._fname)
+        ) = self.find_header_info(filename)
         self._rough_obj = self._preparse_meta(meta_lines)
         # Create a standard metadata object
         metadata = SnowExProfileMetadata(
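The signature change to parse() is the behavioral point of the last hunk: the parser no longer reads a stored self._fname, so one instance can parse many files and the data flow is explicit. A self-contained toy of that pattern (BaseParser stands in for insitupy's SnowExMetaDataParser; the real find_header_info is far more involved):

    import tempfile
    from typing import List, Tuple

    class BaseParser:
        # Toy stand-in for the insitupy parser: treat leading '#' lines
        # as metadata and the next line as the CSV header.
        def find_header_info(self, filename: str) -> Tuple[List[str], List[str], int]:
            with open(filename) as fh:
                lines = fh.read().splitlines()
            meta = []
            for line in lines:
                if not line.startswith("#"):
                    break
                meta.append(line)
            header_position = len(meta)
            columns = lines[header_position].split(",")
            return meta, columns, header_position

    class ExplicitFileParser(BaseParser):
        # Taking the filename as an argument, as the PR's parse() now does,
        # keeps the parser stateless: no hidden self._fname to set first.
        def parse(self, filename: str) -> Tuple[List[str], List[str], int]:
            return self.find_header_info(filename)

    with tempfile.NamedTemporaryFile("w", suffix=".csv", delete=False) as fh:
        fh.write("# Location: Grand Mesa\n# Date: 2020-01-28\nDepth,SWE\n120,300\n")
        path = fh.name

    print(ExplicitFileParser().parse(path))
    # (['# Location: Grand Mesa', '# Date: 2020-01-28'], ['Depth', 'SWE'], 2)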