Merge pull request #2 from linkml/mongodb
first pass at mongodb (authored by Claude)
cmungall authored Apr 10, 2024
2 parents 73fface + f958e56 commit 5e3f073
Showing 23 changed files with 415 additions and 95 deletions.
Makefile (2 changes: 1 addition & 1 deletion)

@@ -15,7 +15,7 @@ all-pytest:
$(RUN) pytest -m "integration or not integration"

install-all:
-poetry install -E analytics -E app -E tests -E llm
+poetry install -E analytics -E app -E tests -E llm -E mongodb

# not yet deployed
doctest:
docs/examples/Tutorial.ipynb (2 changes: 1 addition & 1 deletion)

@@ -42,7 +42,7 @@
{
"cell_type": "markdown",
"source": [
-"## Attach and populate a DuckDB in-memory databse\n",
+"## Attach and populate a DuckDB in-memory database\n",
"\n",
"Here we will create/attach a database to the client, and populate it\n",
"with some JSON objects. Note that for this example, we don't provide a schema - this is *induced* behind the scenes\n",
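
The notebook cell above attaches an in-memory DuckDB database and loads plain JSON objects with no schema declared up front. Below is a minimal sketch of that flow; only Client.attach_database and Collection.find appear elsewhere in this diff, so create_collection, insert, the "duckdb" handle string, and the .rows accessor are assumptions.

```python
from linkml_store import Client

client = Client()
db = client.attach_database("duckdb", alias="mem")  # "duckdb" as an in-memory handle is assumed
persons = db.create_collection("Person")  # assumed helper for creating a named collection
persons.insert([{"id": "P1", "name": "Alice"}, {"id": "P2", "name": "Bob"}])
# no schema was supplied; one is induced behind the scenes from the inserted objects
results = persons.find({}, limit=10)
print(results.rows)  # .rows assumed to hold the matching objects
```
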
poetry.lock (133 changes: 127 additions & 6 deletions)

Some generated files are not rendered by default.

pyproject.toml (4 changes: 3 additions & 1 deletion)

@@ -21,6 +21,7 @@ plotly = { version = "*", optional = true }
pystow = "^0.5.4"
black = { version=">=24.0.0", optional = true }
llm = { version="*", optional = true }
+pymongo = { version="*", optional = true }
pandas = "^2.2.1"

[tool.poetry.group.dev.dependencies]
@@ -48,6 +49,7 @@ analytics = ["pandas", "matplotlib", "seaborn", "plotly"]
app = ["streamlit"]
tests = ["black"]
llm = ["llm"]
+mongodb = ["pymongo"]

[tool.poetry.scripts]
linkml-store = "linkml_store.cli:main"
@@ -109,7 +111,7 @@ max-complexity = 10

[tool.codespell]
# TODO: bring in tests in too
-skip = '.git,*.pdf,*.svg,./tests,pyproject.toml,*.dill,poetry.lock'
+skip = '.git,*.pdf,*.svg,./tests,pyproject.toml,*.dill,poetry.lock,*.ipynb'
# Ignore table where words could be split across rows
# Ignore shortcut specifications like [Ff]alse
ignore-regex = '(\|.*\|.*\|.*\||\[[A-Z][a-z]\][a-z][a-z])'
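
Because pymongo is declared as an optional extra, it is only present when the project is installed with poetry install -E mongodb (or the equivalent pip extra). The snippet below is the generic guard idiom for such an optional dependency, not code from this PR; the helper name is hypothetical.

```python
try:
    import pymongo  # only available when installed with the "mongodb" extra
    HAS_MONGODB = True
except ImportError:
    pymongo = None
    HAS_MONGODB = False


def require_mongodb() -> None:
    """Hypothetical helper: fail loudly if the optional extra is missing."""
    if not HAS_MONGODB:
        raise ImportError("MongoDB support requires: poetry install -E mongodb")
```
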
src/linkml_data_browser/__init__.py (1 change: 0 additions & 1 deletion)

@@ -1,2 +1 @@
"""linkml-data-browser package."""

src/linkml_data_browser/app.py (10 changes: 4 additions & 6 deletions)

@@ -33,9 +33,7 @@ def init_reset_filters(cd: ClassDefinition, reset=False):
st.session_state[key] = "" # Assuming text input, adjust for other types


-def apply_filters(
-collection: Collection, filters: Dict[str, Any], offset: int, limit: int, **kwargs
-):
+def apply_filters(collection: Collection, filters: Dict[str, Any], offset: int, limit: int, **kwargs):
print(f"FILTERS={filters}")
return collection.find(filters, offset=offset, limit=limit, **kwargs)

@@ -44,7 +42,7 @@ def render_filter_widget(collection: Collection, attribute: SlotDefinition):
"""Render appropriate Streamlit widget based on column type."""
logger.info("Rendering filter widget")
# print(f"{attribute.name} // RANGE={attribute.range}")
-col_type = attribute.range
+# col_type = attribute.range
col_name = attribute.name
cols = st.sidebar.columns([1, 10])
with cols[0]:
@@ -58,7 +56,7 @@ def render_filter_widget(collection: Collection, attribute: SlotDefinition):
with cols[1]:
filter_value = st.text_input(f"Filter by {col_name}", key=f"filter_{col_name}")
return filter_value
-#return st.sidebar.text_input(f"Filter by {col_name}")
+# return st.sidebar.text_input(f"Filter by {col_name}")
# elif col_type == "integer":
# max_value = con.execute(f"SELECT MAX({col_name}) FROM {tbl_name}").fetchall()[0][0]
# min_value = con.execute(f"SELECT MIN({col_name}) FROM {tbl_name}").fetchall()[0][0]
@@ -109,7 +107,7 @@ def main():
if prev_value != new_value:
# print(f"CHANGE FOR {att_name}: {prev_value} -> {new_value}")
filter_changed = True
-#st.session_state[key] = new_value
+# st.session_state[key] = new_value
facet_key = f"facet_view_{att_name}"
if facet_key in st.session_state and st.session_state[facet_key]:
facet_results = collection.query_facets(filters, facet_columns=[att_name])
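
For context, apply_filters above is a thin pagination wrapper around Collection.find. A hedged usage sketch follows; the filter dict, page size, and the .rows attribute on the returned result are illustrative assumptions.

```python
from linkml_data_browser.app import apply_filters
from linkml_store.api import Collection


def print_active(collection: Collection) -> None:
    """Page through matching rows, 20 at a time (filter and page size are illustrative)."""
    offset = 0
    while True:
        result = apply_filters(collection, {"status": "active"}, offset=offset, limit=20)
        rows = result.rows  # assumed attribute on the query result
        if not rows:
            return
        for row in rows:
            print(row)
        offset += 20
```
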
src/linkml_store/__init__.py (2 changes: 1 addition & 1 deletion)

@@ -1,7 +1,7 @@
from pathlib import Path

from linkml_store.api import Client

THIS_DIR = Path(__file__).parent

__all__ = ["Client"]

src/linkml_store/api/__init__.py (2 changes: 2 additions & 0 deletions)

@@ -1,6 +1,8 @@
# flake8: noqa: E402
from linkml_store.api.collection import Collection
from linkml_store.api.database import Database
from linkml_store.api.metadata import MetaData
from linkml_store.api.client import Client
# flake8: noqa

__all__ = ["Client", "Database", "MetaData", "Collection"]
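
For reference, the names listed in __all__ above are importable directly from the api package:

```python
from linkml_store.api import Client, Collection, Database, MetaData
```
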
src/linkml_store/api/client.py (7 changes: 6 additions & 1 deletion)

@@ -42,7 +42,12 @@ class Client:
_databases: Optional[Dict[str, Database]] = None

def attach_database(
-self, handle: str, alias: Optional[str] = None, schema_view: Optional[SchemaView] = None, recreate_if_exists=False, **kwargs
+self,
+handle: str,
+alias: Optional[str] = None,
+schema_view: Optional[SchemaView] = None,
+recreate_if_exists=False,
+**kwargs,
) -> Database:
"""
Associate a database with a handle.
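
Given the PR's MongoDB focus, here is a hedged example call against the reformatted signature; the mongodb:// handle scheme and alias value are assumptions not confirmed by this hunk.

```python
from linkml_store import Client

client = Client()
db = client.attach_database(
    "mongodb://localhost:27017/demo_db",  # handle scheme assumed for illustration
    alias="demo",
    recreate_if_exists=True,
)
# attach_database returns a Database object (per the -> Database annotation above)
```
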
(Diffs for the remaining changed files are not shown here.)
