resolve conflict and DIDControllerKey into DID
commit add3747321

@@ -0,0 +1,2 @@
"ExO";"https://verify.exo.cat"
"Somos Connexión";"https://verify.somosconexion.coop"

@@ -1,9 +1,128 @@
import csv
import json
import copy
import pandas as pd
from jsonschema import validate

from django import forms
from django.core.exceptions import ValidationError
from idhub.models import (
    DID,
    File_datas,
    Schemas,
    VerificableCredential,
)
from idhub_auth.models import User


class ImportForm(forms.Form):
    did = forms.ChoiceField(choices=[])
    schema = forms.ChoiceField(choices=[])
    file_import = forms.FileField()

    def __init__(self, *args, **kwargs):
        self._schema = None
        self._did = None
        self.rows = {}
        self.user = kwargs.pop('user', None)
        super().__init__(*args, **kwargs)
        self.fields['did'].choices = [
            (x.did, x.label) for x in DID.objects.filter(user=self.user)
        ]
        self.fields['schema'].choices = [
            (x.id, x.name()) for x in Schemas.objects.filter()
        ]

    def clean_did(self):
        data = self.cleaned_data["did"]
        did = DID.objects.filter(
            user=self.user,
            did=data
        )

        if not did.exists():
            raise ValidationError("Did is not valid!")

        self._did = did.first()

        return data

    def clean_schema(self):
        data = self.cleaned_data["schema"]
        schema = Schemas.objects.filter(
            id=data
        )
        if not schema.exists():
            raise ValidationError("Schema is not valid!")

        self._schema = schema.first()

        return data

    def clean_file_import(self):
        data = self.cleaned_data["file_import"]
        self.file_name = data.name
        if File_datas.objects.filter(file_name=self.file_name, success=True).exists():
            raise ValidationError("This file already exists!")

        self.json_schema = json.loads(self._schema.data)
        df = pd.read_csv (data, delimiter="\t", quotechar='"', quoting=csv.QUOTE_ALL)
        data_pd = df.fillna('').to_dict()

        if not data_pd:
            self.exception("This file is empty!")

        for n in range(df.last_valid_index()+1):
            row = {}
            for k in data_pd.keys():
                row[k] = data_pd[k][n]

            user = self.validate_jsonld(n, row)
            self.rows[user] = row

        return data

    def save(self, commit=True):
        table = []
        for k, v in self.rows.items():
            table.append(self.create_credential(k, v))

        if commit:
            for cred in table:
                cred.save()
            File_datas.objects.create(file_name=self.file_name)
            return table

        return

    def validate_jsonld(self, line, row):
        try:
            validate(instance=row, schema=self.json_schema)
        except Exception as e:
            msg = "line {}: {}".format(line+1, e)
            self.exception(msg)

        user = User.objects.filter(email=row.get('email'))
        if not user:
            txt = _('The user not exist!')
            msg = "line {}: {}".format(line+1, txt)
            self.exception(msg)

        return user.first()

    def create_credential(self, user, row):
        d = copy.copy(self.json_schema)
        d['instance'] = row
        return VerificableCredential(
            verified=False,
            user=user,
            data=json.dumps(d)
        )

    def exception(self, msg):
        File_datas.objects.create(file_name=self.file_name, success=False)
        raise ValidationError(msg)


class SchemaForm(forms.Form):
    file_template = forms.FileField()

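For context (not part of the commit): given the read_csv options above, ImportForm expects a tab-separated, fully quoted file whose column names match the schema's properties, plus an "email" column used to look up the credential's User. A minimal sketch with hypothetical columns:

# Minimal sketch (not from the commit) of the file layout ImportForm parses.
# The column names "email" and "name" are hypothetical examples.
import csv
import io

import pandas as pd

sample = '"email"\t"name"\n"user1@example.org"\t"Alice"\n'
df = pd.read_csv(io.StringIO(sample), delimiter="\t", quotechar='"', quoting=csv.QUOTE_ALL)
# Each row dict is what the form validates against the JSON schema and turns
# into a VerificableCredential for the user matched by the "email" value.
print(df.fillna('').to_dict())  # {'email': {0: 'user1@example.org'}, 'name': {0: 'Alice'}}
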
@@ -1,5 +1,4 @@
import os
import csv
import json
import copy
import logging

@@ -11,12 +10,18 @@ from smtplib import SMTPException
from django.conf import settings
from django.utils.translation import gettext_lazy as _
from django.views.generic.base import TemplateView
-from django.views.generic.edit import UpdateView, CreateView, DeleteView
+from django.views.generic.edit import (
+    CreateView,
+    DeleteView,
+    FormView,
+    UpdateView,
+)
from django.shortcuts import get_object_or_404, redirect
from django.urls import reverse_lazy
from django.http import HttpResponse
from django.contrib import messages
-from apiregiter import iota
+from utils.apiregiter import iota
+from utils import credtools
from idhub_auth.models import User
from idhub.mixins import AdminView
from idhub.email.views import NotifyActivateUserByEmail

@@ -60,7 +65,7 @@ class SchemasMix(AdminView, TemplateView):
    section = "Templates"


-class ImportExport(AdminView, TemplateView):
+class ImportExport(AdminView):
    title = _("Massive Data Management")
    section = "ImportExport"

@@ -694,7 +699,7 @@ class SchemasImportAddView(SchemasMix):
        return data


-class ImportView(ImportExport):
+class ImportView(ImportExport, TemplateView):
    template_name = "idhub/admin/import.html"
    subtitle = _('Import')
    icon = ''

@@ -707,7 +712,7 @@ class ImportView(ImportExport):
        return context


-class ImportStep2View(ImportExport):
+class ImportStep2View(ImportExport, TemplateView):
    template_name = "idhub/admin/import_step2.html"
    subtitle = _('Import')
    icon = ''

@@ -720,93 +725,23 @@ class ImportStep2View(ImportExport):
        return context


-class ImportStep3View(ImportExport):
-    template_name = "idhub/admin/import_step3.html"
+class ImportAddView(ImportExport, FormView):
+    template_name = "idhub/admin/import_add.html"
    subtitle = _('Import')
    icon = ''
+    form_class = ImportForm
+    success_url = reverse_lazy('idhub:admin_import')

-    def get_context_data(self, **kwargs):
-        context = super().get_context_data(**kwargs)
-        context.update({
-            'form': ImportForm(),
-        })
-        return context
+    def get_form_kwargs(self):
+        kwargs = super().get_form_kwargs()
+        kwargs['user'] = self.request.user
+        return kwargs

-    def post(self, request, *args, **kwargs):
-        self.pk = kwargs['pk']
-        self.schema = get_object_or_404(Schemas, pk=self.pk)
-        form = ImportForm(request.POST, request.FILES)
-        if form.is_valid():
-            result = self.handle_uploaded_file()
-            if not result:
-                messages.error(request, _("There are some errors in the file"))
-                return super().get(request, *args, **kwargs)
-            return redirect(self.success_url)
+    def form_valid(self, form):
+        cred = form.save()
+        if cred:
+            messages.success(self.request, _("The file import was successfully!"))
        else:
-            return super().get(request, *args, **kwargs)

-        return super().post(request, *args, **kwargs)

-    def handle_uploaded_file(self):
-        f = self.request.FILES.get('file_import')
-        if not f:
-            messages.error(self.request, _("There aren't file"))
-            return

-        file_name = f.name
-        if File_datas.objects.filter(file_name=file_name, success=True).exists():
-            messages.error(self.request, _("This file already exists!"))
-            return

-        self.json_schema = json.loads(self.schema.data)
-        df = pd.read_csv (f, delimiter="\t", quotechar='"', quoting=csv.QUOTE_ALL)
-        data_pd = df.fillna('').to_dict()
-        rows = {}

-        if not data_pd:
-            File_datas.objects.create(file_name=file_name, success=False)
-            return

-        for n in range(df.last_valid_index()+1):
-            row = {}
-            for k in data_pd.keys():
-                row[k] = data_pd[k][n]

-            user = self.validate(n, row)
-            if not user:
-                File_datas.objects.create(file_name=file_name, success=False)
-                return

-            rows[user] = row

-        File_datas.objects.create(file_name=file_name)
-        for k, v in rows.items():
-            self.create_credential(k, v)

-        return True

-    def validate(self, line, row):
-        try:
-            validate(instance=row, schema=self.json_schema)
-        except Exception as e:
-            messages.error(self.request, "line {}: {}".format(line+1, e))
-            return

-        user = User.objects.filter(email=row.get('email'))
-        if not user:
-            txt = _('The user not exist!')
-            messages.error(self.request, "line {}: {}".format(line+1, txt))
-            return

-        return user.first()

-    def create_credential(self, user, row):
-        d = copy.copy(self.json_schema)
-        d['instance'] = row
-        return VerificableCredential.objects.create(
-            verified=False,
-            user=user,
-            data=json.dumps(d)
-        )
+            messages.error(self.request, _("Error importing the file!"))
+        return super().form_valid(form)

@@ -1,5 +1,10 @@
+import os
+import csv
+
+from pathlib import Path
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth import get_user_model
+from decouple import config
from idhub.models import Organization


@@ -10,20 +15,20 @@ class Command(BaseCommand):
    help = "Insert minimum datas for the project"

    def handle(self, *args, **kwargs):
-        admin = 'admin@example.org'
-        pw_admin = '1234'
+        ADMIN_EMAIL = config('ADMIN_EMAIL', 'admin@example.org')
+        ADMIN_PASSWORD = config('ADMIN_PASSWORD', '1234')
+        USER_EMAIL = config('USER_EMAIL', 'user1@example.org')
+        USER_PASSWORD = config('USER_PASSWORD', '1234')

-        user = 'user1@example.org'
-        pw_user = '1234'
-        organization = [
-            ("ExO", "https://verify.exo.cat"),
-            ("Somos Connexión", "https://verify.somosconexion.coop")
-        ]
+        self.create_admin_users(ADMIN_EMAIL, ADMIN_PASSWORD)
+        self.create_users(USER_EMAIL, USER_PASSWORD)

-        # self.create_admin_users(admin, pw_admin)
-        self.create_users(user, pw_user)
-        for o in organization:
-            self.create_organizations(*o)
+        BASE_DIR = Path(__file__).resolve().parent.parent.parent.parent
+        ORGANIZATION = os.path.join(BASE_DIR, 'examples/organizations.csv')
+        with open(ORGANIZATION, newline='\n') as csvfile:
+            f = csv.reader(csvfile, delimiter=';', quotechar='"')
+            for r in f:
+                self.create_organizations(r[0].strip(), r[1].strip())

    def create_admin_users(self, email, password):
        User.objects.create_superuser(email=email, password=password)

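For context (not part of the commit): the config() calls above come from python-decouple, so the seeded credentials can be overridden per deployment through environment variables or a .env file instead of editing the command. A minimal sketch:

# Minimal sketch (not from the commit): decouple falls back to the second argument
# unless the variable is set in the environment or a .env file.
import os

from decouple import config

os.environ['ADMIN_EMAIL'] = 'admin@idhub.example'   # hypothetical override
print(config('ADMIN_EMAIL', 'admin@example.org'))   # -> admin@idhub.example
print(config('ADMIN_PASSWORD', '1234'))             # -> 1234 (default, nothing set)
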
@@ -16,13 +16,16 @@ class DID(models.Model):
    created_at = models.DateTimeField(auto_now=True)
    did = models.CharField(max_length=250, unique=True)
    label = models.CharField(max_length=50)
+    # In JWK format. Must be stored as-is and passed whole to library functions.
+    # Example key material:
+    # '{"kty":"OKP","crv":"Ed25519","x":"oB2cPGFx5FX4dtS1Rtep8ac6B__61HAP_RtSzJdPxqs","d":"OJw80T1CtcqV0hUcZdcI-vYNBN1dlubrLaJa0_se_gU"}'
+    key_material = models.CharField(max_length=250)
    user = models.ForeignKey(
        User,
        on_delete=models.CASCADE,
        related_name='dids',
        null=True,
    )
    # kind = "KEY|WEB"

    @property
    def is_organization_did(self):

@@ -31,18 +34,6 @@
        return False


-class DIDControllerKey(models.Model):
-    # In JWK format. Must be stored as-is and passed whole to library functions.
-    # Example key material:
-    # '{"kty":"OKP","crv":"Ed25519","x":"oB2cPGFx5FX4dtS1Rtep8ac6B__61HAP_RtSzJdPxqs","d":"OJw80T1CtcqV0hUcZdcI-vYNBN1dlubrLaJa0_se_gU"}'
-    key_material = models.CharField(max_length=250)
-    owner_did = models.ForeignKey(
-        DID,
-        on_delete=models.CASCADE,
-        related_name="keys"
-    )
-
-
class Schemas(models.Model):
    file_schema = models.CharField(max_length=250)
    data = models.TextField()

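For context (not part of the commit): key material like the JWK in the comment above can be produced with the didkit bindings pinned in requirements.txt, assuming they expose generate_ed25519_key() and key_to_did() as synchronous helpers. A minimal sketch:

# Minimal sketch (not from the commit), assuming the didkit Python bindings
# listed in requirements.txt provide these synchronous helpers.
import didkit

key_material = didkit.generate_ed25519_key()   # JWK string, stored as-is in DID.key_material
did = didkit.key_to_did("key", key_material)   # derives a did:key identifier from the JWK
print(did)
print(key_material)
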
@@ -28,7 +28,7 @@
          </tbody>
        </table>
        <div class="form-actions-no-box">
-          <a class="btn btn-green-admin" href="{% url 'idhub:admin_import_step2' %}">{% translate "Import Datas" %} <i class="bi bi-plus"></i></a>
+          <a class="btn btn-green-admin" href="{% url 'idhub:admin_import_add' %}">{% translate "Import Datas" %} <i class="bi bi-plus"></i></a>
        </div>
      </div>
    </div>

@@ -1,34 +0,0 @@
{% extends "idhub/base_admin.html" %}
{% load i18n %}

{% block content %}
  <h3>
    <i class="{{ icon }}"></i>
    {{ subtitle }}
  </h3>
  <div class="row mt-5">
    <div class="col">
      <div class="table-responsive">
        <table class="table table-striped table-sm">
          <thead>
            <tr>
              <th scope="col"><button type="button" class="btn btn-grey border border-dark">{% trans 'Created at' %}</button></th>
              <th scope="col"><button type="button" class="btn btn-grey border border-dark">{% trans 'Template file' %}</button></th>
              <th scope="col"></th>
              <th scope="col"></th>
            </tr>
          </thead>
          <tbody>
            {% for schema in schemas.all %}
            <tr style="font-size:15px;">
              <td>{{ schema.created_at }}</td>
              <td>{{ schema.file_schema }}</td>
              <td><a class="btn btn-green-admin" href="{% url 'idhub:admin_import_step3' schema.id %}" title="{% trans 'Import Dates' %}">{% trans 'Import Dates' %}</a></td>
            </tr>
            {% endfor %}
          </tbody>
        </table>
      </div>
    </div>
  </div>
{% endblock %}

@@ -169,8 +169,6 @@ urlpatterns = [
        name='admin_schemas_import_add'),
    path('admin/import', views_admin.ImportView.as_view(),
        name='admin_import'),
-    path('admin/import/new', views_admin.ImportStep2View.as_view(),
-        name='admin_import_step2'),
-    path('admin/import/<int:pk>/', views_admin.ImportStep3View.as_view(),
-        name='admin_import_step3'),
+    path('admin/import/new', views_admin.ImportAddView.as_view(),
+        name='admin_import_add'),
]

@@ -12,7 +12,7 @@ from django.shortcuts import get_object_or_404, redirect
from django.urls import reverse_lazy
from django.http import HttpResponse
from django.contrib import messages
-from apiregiter import iota
+from utils.apiregiter import iota
from idhub.user.forms import ProfileForm, RequestCredentialForm, CredentialPresentationForm
from idhub.mixins import UserView
from idhub.models import DID, VerificableCredential

File diff suppressed because it is too large
File diff suppressed because it is too large

@@ -8,3 +8,5 @@ pandas==2.1.1
requests==2.31.0
didkit==0.3.2
jinja2==3.1.2
jsonref==1.1.0
pyld==2.0.3

@@ -178,4 +178,8 @@ MESSAGE_TAGS = {
    messages.ERROR: 'alert-danger',
}

+LOCALE_PATHS = [
+    os.path.join(BASE_DIR, 'locale'),
+]
+
AUTH_USER_MODEL = 'idhub_auth.User'

@@ -0,0 +1,174 @@
import json
#import jsonld
import csv
import sys
import jsonschema
from pyld import jsonld
#from jsonschema import validate, ValidationError
import requests
from pyld import jsonld
import jsonref

#def remove_null_values(dictionary):
#    return {k: v for k, v in dictionary.items() if v is not None}

def _remove_null_values(dictionary):
    filtered = {k: v for k, v in dictionary.items() if v is not None and v != ''}
    dictionary.clear()
    dictionary.update(filtered)

def validate_context(jsld):
    """Validate a @context string through expanding"""
    context = jsld["@context"]
    # schema = jsld["credentialSchema"]
    # Validate the context
    try:
        jsonld.expand(context)
        print("Context is valid")
    except jsonld.JsonLdError:
        print("Context is not valid")
        return False
    return True

def compact_js(doc, context):
    """Validate a @context string through compacting, returns compacted context"""
    try:
        compacted = jsonld.compact(doc, context)
        print(json.dumps(compacted, indent=2))
    except jsonld.JsonLdError as e:
        print(f"Error compacting document: {e}")
        return None
    return compacted

def dereference_context_file(json_file):
    """Dereference and return json-ld context from file"""
    json_text = open(json_file).read()
    json_dict = json.loads(json_text)
    return dereference_context(json_dict)


def dereference_context(jsonld_dict):
    """Dereference and return json-ld context"""
    try:
        # Extract the context from the parsed JSON-LD
        context_urls = jsonld_dict.get('@context')
        if not context_urls:
            raise ValueError("No context found in the JSON-LD string.")
            return None

        # Dereference each context URL
        dereferenced_contexts = []
        for context_url in context_urls:
            response = requests.get(context_url)
            response.raise_for_status()  # Raise an exception if the request failed
            context_dict = response.json()
            dereferenced_context = jsonref.loads(json.dumps(context_dict))
            dereferenced_contexts.append(dereferenced_context)

        print(f"dereferenced contexts:\n", json.dumps(dereferenced_contexts, indent=4))
        return dereferenced_contexts

    except (json.JSONDecodeError, requests.RequestException, jsonref.JsonRefError) as e:
        print(f"An error occurred: {e}")
        return None


def validate_schema_file(json_schema_file):
    """Validate standalone schema from file"""
    try:
        json_schema = open(json_schema_file).read()
        validate_schema(json_schema)
    except Exception as e:
        print(f"Error loading file {json_schema_file} or validating schema {json_schema}: {e}")
        return False
    return True

def validate_schema(json_schema):
    """Validate standalone schema, returns bool (uses Draft202012Validator, alt: Draft7Validator, alt: Draft4Validator, Draft6Validator )"""
    try:
        jsonschema.validators.Draft202012Validator.check_schema(json_schema)
        # jsonschema.validators.Draft7Validator.check_schema(json_schema)
        return True
    except jsonschema.exceptions.SchemaError as e:
        print(e)
        return False

def validate_json(json_data, json_schema):
    """Validate json string basic (no format) with schema, returns bool"""
    try:
        jsonschema.validate(instance=json_data, schema=json_schema)
    except jsonschema.exceptions.ValidationError as err:
        print('Validation error: ', json_data, '\n')
        return False
    return True

def validate_json_format(json_data, json_schema):
    """Validate a json string basic (including format) with schema, returns bool"""
    try:
        jsonschema.validate(instance=json_data, schema=json_schema, format_checker=FormatChecker())
    except jsonschema.exceptions.ValidationError as err:
        print('Validation error: ', json_data, '\n')
        return False
    return True

def schema_to_csv(schema, csv_file_path):
    """Extract headers from an schema and write to file, returns bool"""
    headers = list(schema['properties'].keys())

    # Create a CSV file with the headers
    with open(csv_file_path, 'w', newline='') as csv_file:
        writer = csv.writer(csv_file)
        writer.writerow(headers)
    return True


def csv_to_json(csvFilePath, schema, jsonFilePath):
    """Read from a csv file, check schema, write to json file, returns bool"""
    jsonArray = []
    # Read CSV file
    with open(csvFilePath, 'r') as csvf:
        # Load CSV file data using csv library's dictionary reader
        csvReader = csv.DictReader(csvf)

        # Convert each CSV row into python dict and validate against schema
        for row in csvReader:
            _remove_null_values(row)
            print('Row: ', row, '\n')
            validate_json(row, schema)
            # Add this python dict to json array
            jsonArray.append(row)

    # Convert python jsonArray to JSON String and write to file
    with open(jsonFilePath, 'w', encoding='utf-8') as jsonf:
        jsonString = json.dumps(jsonArray, indent=4)
        jsonf.write(jsonString)
    return True

def csv_to_json2(csv_file_path, json_file_path):
    """Read from a csv file, write to json file (assumes a row 'No' is primary key), returns bool EXPERIMENT"""
    # Create a dictionary
    data = {}

    # Open a csv reader called DictReader
    with open(csv_file_path, encoding='utf-8') as csvf:
        csvReader = csv.DictReader(csvf)

        # Convert each row into a dictionary and add it to data
        for rows in csvReader:
            # Assuming a column named 'No' to be the primary key
            key = rows['No']
            data[key] = rows

    # Open a json writer, and use the json.dumps() function to dump data
    with open(json_file_path, 'w', encoding='utf-8') as jsonf:
        jsonf.write(json.dumps(data, indent=4))
    return True

if __name__ == "__main__":
    sch_name = sys.argv[1]
    sch_file = sch_name + '-schema.json'
    sch = json.loads(open(sch_file).read())
    if validate_json(d, sch):
        generate_csv_from_schema(sch, sch_name + '-template.csv')
    else:
        print("Validation error: ", sch_name)
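For context (not part of the commit): a minimal usage sketch of the helpers above, with hypothetical file names. schema_to_csv() writes a template header row from a JSON Schema's properties, and csv_to_json() validates each CSV row against that schema before dumping the rows to a JSON file:

# Minimal sketch (not from the commit); file names are hypothetical.
import json

from utils import credtools

schema = json.loads(open('membership-schema.json').read())
credtools.schema_to_csv(schema, 'membership-template.csv')                    # header row only
credtools.csv_to_json('membership-data.csv', schema, 'membership-data.json')  # validate rows, write JSON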