Merge pull request #15 from JordanWelsman/0.4-bugfixes
Finished v0.4.2. Merging for release now.
JordanWelsman authored Feb 10, 2023
2 parents 703b1f8 + 7bf2a37 commit a35492d
Showing 6 changed files with 39 additions and 32 deletions.
10 changes: 2 additions & 8 deletions .gitignore
@@ -8,17 +8,11 @@ dist/
test/__pycache__/
nexport/__pycache__/
nexport/nexport.egg-info/
nexport.egg-info/
*.csv
*.ipynb
*.json
*.pyc
*.txt
*.xml
model.csv
model.json
model.txt
test.py
weights_and_biases.csv
weights_and_biases.json
weights_and_biases.txt
weights_and_biases.xml
test.py
2 changes: 1 addition & 1 deletion nexport/__init__.py
@@ -1,5 +1,5 @@
# Dunder attributes
__version__ = "v0.4.0" # update setup.py
__version__ = "v0.4.1" # update setup.py
__author__ = "Jordan Welsman"

# Import submodules so submodule functions
38 changes: 24 additions & 14 deletions nexport/pytorch/exporting.py
@@ -1,6 +1,7 @@
# Module imports
import nexport
import json
# import os # disabling os.getlogin() as it causes issues on supercomputers
import datetime as dt
import time as t

@@ -94,17 +95,18 @@ def create_layer_object(weights: list, biases: list, verbose: int = None) -> lis
return neuron_list # return constructed layer


def create_model_metadata(model_name: str, model_author: str = None) -> dict:
def create_model_metadata(model_name: str, model_author: str = None, using_skip_connections: bool = None) -> dict:
model_metadata = {
"modelName": model_name,
"modelAuthor": model_author,
"compilationDate": str(dt.datetime.now())
"compilationDate": str(dt.datetime.now()),
"usingSkipConnections": using_skip_connections
}

return model_metadata # return model metadata object


def create_model_object(model: object, verbose: int = None, include_metadata: bool = None, model_name: str = None, model_author: str = None) -> object:
def create_model_object(model: object, verbose: int = None, include_metadata: bool = None, model_name: str = None, model_author: str = None, using_skip_connections: bool = None) -> object:
"""
Function which creates a model object from a
collection of layers instantiated with layer
@@ -116,7 +118,7 @@ def create_model_object(model: object, verbose: int = None, include_metadata: bo
weights, biases = create_paramater_arrays(model=model, verbose=verbose)

if include_metadata: # insert model metadata into model object
model_object["model"] = create_model_metadata(model_name=model_name, model_author=model_author)
model_object["metadata"] = create_model_metadata(model_name=model_name, model_author=model_author, using_skip_connections=using_skip_connections)

if verbose >= 3: # if verbose set to at least 3
print(f"{c.YELLOW}Creating layers...{c.DEFAULT}")
@@ -137,22 +139,15 @@ def create_model_object(model: object, verbose: int = None, include_metadata: bo
return model_object # return constructed network


def new_func():
"""
Template function
"""
pass


def export_to_json(model: object, filename: str = None, indent: int = None, verbose: int = None, include_metadata: bool = None, model_name: str = None, model_author: str = None) -> None:
def export_to_json(model: object, filename: str = None, indent: int = None, verbose: int = None, include_metadata: bool = None, model_name: str = None, model_author: str = None, using_skip_connections: bool = None) -> None:
"""
Function which exports a passed model
object to a JSON file.
"""
t1 = t.time()
model_object = {}
if include_metadata:
model_object = create_model_object(model=model, verbose=verbose, include_metadata=include_metadata, model_name=model_name, model_author=model_author)
model_object = create_model_object(model=model, verbose=verbose, include_metadata=include_metadata, model_name=model_name, model_author=model_author, using_skip_connections=using_skip_connections)
else:
model_object = create_model_object(model=model, verbose=verbose)
json_object = json.dumps(obj=model_object, indent=indent)
@@ -169,18 +164,33 @@ def export_to_json(model: object, filename: str = None, indent: int = None, verb
print(f"{c.MAGENTA} Time taken: {c.LIGHTMAGENTA}{round(time, 2)}{c.MAGENTA}s{c.DEFAULT}")


def export_to_json_experimental(model: object, filename: str = None, indent: int = None, verbose: int = None) -> None:
def export_to_json_experimental(model: object, filename: str = None, indent: int = None, verbose: int = None, include_metadata: bool = None, model_name: str = None, model_author: str = None, using_skip_connections: bool = None) -> None:
"""
Function which exports a passed
model object to a JSON file, but
keeps array elements on one line.
"""
t1 = t.time()
model_object = create_model_object(model=model, verbose=verbose)
model_metadata = create_model_metadata(model_name=model_name, model_author=model_author, using_skip_connections=using_skip_connections)
indent = " "

with open(nexport.append_extension(filename=filename, extension="json"), "w") as outfile:
outfile.write("{\n")
if include_metadata:
outfile.write(f"{indent}\"metadata\": " + "{\n")
for d, data in enumerate(model_metadata.keys()):
if type(model_metadata[data]) is str:
outfile.write(f"{indent}{indent}\"{data}\": \"{model_metadata[data]}\"")
elif type(model_metadata[data]) is bool:
outfile.write(f"{indent}{indent}\"{data}\": {str(model_metadata[data]).lower()}")
else:
outfile.write(f"{indent}{indent}\"{data}\": null")
if d < len(model_metadata.keys()) - 1:
outfile.write(",\n")
else:
outfile.write("\n")
outfile.write(f"{indent}" + "},\n")
for layer_type in model_object.keys():
if layer_type == "hidden_layers":
outfile.write(f"{indent}\"{layer_type}\": [\n")
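For reference, a minimal usage sketch of the updated export_to_json signature. The toy model and argument values below are illustrative only; using_skip_connections is the keyword added in this diff, the rest already existed.

import torch.nn as nn
from nexport.pytorch import exporting as npte

# Illustrative stand-in model; any PyTorch model nexport already handles should behave the same way.
model = nn.Sequential(nn.Linear(4, 8), nn.ReLU(), nn.Linear(8, 2))

# Export with metadata; the metadata object is now stored under the "metadata" key
# and records usingSkipConnections alongside name, author, and compilation date.
npte.export_to_json(
    model=model,
    filename="model",              # ".json" is appended via nexport.append_extension
    indent=4,
    verbose=1,
    include_metadata=True,
    model_name="My Model",
    model_author="Jordan Welsman",
    using_skip_connections=False,
)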
7 changes: 3 additions & 4 deletions nexport/utils.py
@@ -1,7 +1,6 @@
# Module imports
import nexport
import sys
import os

# File imports
from nexport.pytorch import exporting as npte
@@ -37,16 +36,16 @@ def detect_framework(imported: object = sys.modules.keys()) -> str:
return "none"


def export(model: object, filetype: str, filename: str = "model", indent: int = 4, verbose: int = 1, include_metadata: bool = False, model_name: str = "My Model", model_author: str = os.getlogin()):
def export(model: object, filetype: str, filename: str = "model", indent: int = 4, verbose: int = 1, include_metadata: bool = False, model_name: str = "My Model", model_author: str = None, using_skip_connections: bool = None) -> None:
match nexport.__framework__:
case "pytorch":
match filetype:
case "txt":
npte.export_to_file(model=model, filename=filename)
case "json":
npte.export_to_json(model=model, filename=filename, indent=indent, verbose=verbose, include_metadata=include_metadata, model_name=model_name, model_author=model_author)
npte.export_to_json(model=model, filename=filename, indent=indent, verbose=verbose, include_metadata=include_metadata, model_name=model_name, model_author=model_author, using_skip_connections=using_skip_connections)
case "json_exp":
npte.export_to_json_experimental(model=model, filename=filename, indent=indent, verbose=verbose)
npte.export_to_json_experimental(model=model, filename=filename, indent=indent, verbose=verbose, include_metadata=include_metadata, model_name=model_name, model_author=model_author, using_skip_connections=using_skip_connections)
case "csv" | "xml":
raise NotImplementedError(f"This feature (exporting {nexport.__framework__} in {filetype}) has not yet been implemented.")
case other:
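With this change the top-level dispatcher forwards the metadata arguments to the experimental exporter as well. A hedged sketch of a call through it, assuming nexport has already detected the pytorch framework and model is a PyTorch model as in the sketch above:

from nexport import utils

# "json_exp" now receives the same metadata arguments as "json".
utils.export(
    model=model,                    # placeholder: the PyTorch model to export
    filetype="json_exp",
    filename="model_exp",
    indent=4,
    verbose=1,
    include_metadata=True,
    model_name="My Model",
    model_author="Jordan Welsman",  # no longer defaults to os.getlogin()
    using_skip_connections=True,
)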
12 changes: 8 additions & 4 deletions setup.py
@@ -2,13 +2,16 @@
from setuptools import setup

# Arguments
version = "0.4.0" # update __init__.py
version = "0.4.1" # update __init__.py
python_version = ">=3.10"

# Long description from README.md
with open("README.md", "r") as fh:
long_description = fh.read()

# Define list of submodules
py_modules = ["calculators", "colors", "generic", "models", "utils", "pytorch", "tensorflow"]

# Run setup function
setup(
name = 'nexport',
@@ -19,7 +22,6 @@
author = 'Jordan Welsman',
author_email = 'welsman@lbl.gov',
url = 'https://github.com/JordanWelsman/nexport/',
py_modules = ["__init__", "calculators", "colors", "models", "utils"],
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
@@ -35,9 +37,11 @@
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities'
],
package_dir = {'': 'nexport'},
package_data = {
'nexport': py_modules
},
install_requires = [
"jutils",
"jutl",
"numpy<1.24",
],
extras_require = {
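The version string still has to be bumped by hand in two places (note the paired "# update __init__.py" / "# update setup.py" comments). A small, hypothetical consistency check one could keep next to the repo — not part of this commit:

# check_version.py -- hypothetical helper, not part of this commit.
import re
from pathlib import Path

setup_src = Path("setup.py").read_text()
init_src = Path("nexport/__init__.py").read_text()

# setup.py stores "0.4.1"; __init__.py stores "v0.4.1", so the optional "v" is stripped.
setup_version = re.search(r'^version\s*=\s*"([^"]+)"', setup_src, re.MULTILINE).group(1)
init_version = re.search(r'__version__\s*=\s*"v?([^"]+)"', init_src).group(1)

assert setup_version == init_version, (
    f"version mismatch: setup.py has {setup_version}, __init__.py has v{init_version}"
)
print(f"versions match: {setup_version}")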
2 changes: 1 addition & 1 deletion unbuild
@@ -14,9 +14,9 @@ if [[ $REPLY =~ ^[Yy]$ ]]
then
rm -rf build # remove build directory if exists
rm -rf dist # remove distribution directory if exists
rm -rf nexport/nexport.egg-info # remove egg info directory if exists
find . -name __pycache__ -type d -print0|xargs -0 rm -r -- # remove all pycache directories
find . -name .pytest_cache -type d -print0|xargs -0 rm -r -- # remove all pytest cache directories
find . -name nexport.egg-info -type d -print0|xargs -0 rm -r -- # remove all egg-info directories
echo "Project successfully unbuilt."
else
echo "Operation aborted."
