JSON ingestion updates
parent
c700652863
commit
634b3fb512
|
@ -86,3 +86,40 @@ jQuery-UI
|
|||
externally maintained libraries used by this software which have their
|
||||
own licenses; we recommend you read them, as their terms may differ from
|
||||
the terms above.
|
||||
|
||||
|
||||
stream-json
|
||||
This library is available under the terms of the modified BSD license. No external contributions
|
||||
are allowed under licenses which are fundamentally incompatible with the BSD license that this library is distributed under.
|
||||
|
||||
The text of the BSD license is reproduced below.
|
||||
|
||||
-------------------------------------------------------------------------------
|
||||
The "New" BSD License:
|
||||
**********************
|
||||
|
||||
Copyright (c) 2005-2018, Eugene Lazutkin
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
* Neither the name of Eugene Lazutkin nor the names of other contributors
|
||||
may be used to endorse or promote products derived from this software
|
||||
without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
@ -60,12 +60,13 @@
|
|||
"jquery": "^3.2.1",
|
||||
"linkurious": "^1.5.1",
|
||||
"mustache": "^2.3.0",
|
||||
"neo4j-driver": "^1.5.2",
|
||||
"neo4j-driver": "^1.6.2",
|
||||
"react": "^16.2.0",
|
||||
"react-bootstrap": "^0.32.0",
|
||||
"react-dom": "^16.2.0",
|
||||
"react-if": "^2.1.0",
|
||||
"react-transition-group": "^2.2.1",
|
||||
"stream-json": "^1.1.0",
|
||||
"unzipper": "^0.8.9"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -540,6 +540,12 @@ export default class GraphContainer extends Component {
|
|||
var id = data.identity.low;
|
||||
var type = data.labels[0];
|
||||
var label = data.properties.name;
|
||||
var guid = data.properties.guid;
|
||||
|
||||
if (label == null){
|
||||
label = guid;
|
||||
}
|
||||
|
||||
var node = {
|
||||
id: id,
|
||||
type: type,
|
||||
|
|
|
@ -1,15 +1,20 @@
|
|||
import React, { Component } from 'react';
|
||||
import MenuButton from './MenuButton';
|
||||
import ProgressBarMenuButton from './ProgressBarMenuButton';
|
||||
import { buildDomainProps, buildSessionProps, buildLocalAdminProps, buildGroupMembershipProps, buildACLProps, findObjectType, buildStructureProps, buildGplinkProps} from 'utils';
|
||||
import { buildGpoAdminJson, buildSessionJson, buildUserJson,buildComputerJson, buildDomainJson, buildGpoJson, buildGroupJson, buildOuJson, buildDomainProps, buildSessionProps, buildLocalAdminProps, buildGroupMembershipProps, buildACLProps, findObjectType, buildStructureProps, buildGplinkProps} from 'utils';
|
||||
import { If, Then, Else } from 'react-if';
|
||||
const { dialog, clipboard, app } = require('electron').remote;
|
||||
const { dialog, app } = require('electron').remote;
|
||||
var fs = require('fs');
|
||||
var async = require('async');
|
||||
var unzip = require('unzipper');
|
||||
var fpath = require('path');
|
||||
var csv = require('fast-csv');
|
||||
|
||||
const Pick = require('stream-json/filters/Pick');
|
||||
const {streamArray} = require('stream-json/streamers/StreamArray');
|
||||
const {chain} = require('stream-chain');
|
||||
const Asm = require('stream-json/Assembler');
|
||||
|
||||
export default class MenuContainer extends Component {
|
||||
constructor(){
|
||||
super();
|
||||
|
@ -74,7 +79,7 @@ export default class MenuContainer extends Component {
|
|||
this.unzipNecessary(fileNames).then(function(results){
|
||||
async.eachSeries(results, function(file, callback){
|
||||
emitter.emit('showAlert', 'Processing file {}'.format(file.name));
|
||||
this.processFile(file.path, callback);
|
||||
this.getFileMeta(file.path, callback);
|
||||
}.bind(this),
|
||||
function done(){
|
||||
setTimeout(function(){
|
||||
|
@ -121,7 +126,92 @@ export default class MenuContainer extends Component {
|
|||
emitter.emit('showAbout');
|
||||
}
|
||||
|
||||
processFile(file, callback){
|
||||
getFileMeta(file, callback){
|
||||
let acceptableTypes = ["sessions","ous","groups","gpoadmins","gpos","computers","users","domains"];
|
||||
let count;
|
||||
let type;
|
||||
|
||||
console.log(file)
|
||||
|
||||
let pipeline = chain([
|
||||
fs.createReadStream(file, {encoding: 'utf8'}),
|
||||
Pick.withParser({filter:'meta'})
|
||||
]);
|
||||
|
||||
let asm = Asm.connectTo(pipeline);
|
||||
asm.on('done', function(asm){
|
||||
let data = asm.current
|
||||
count = data.count
|
||||
type = data.type
|
||||
|
||||
if (!acceptableTypes.includes(type)){
|
||||
emitter.emit('showAlert', 'Unrecognized JSON Type');
|
||||
callback();
|
||||
}
|
||||
|
||||
this.processJson(file, callback, count, type)
|
||||
}.bind(this))
|
||||
|
||||
}
|
||||
|
||||
processJson(file, callback, count, type){
|
||||
let pipeline = chain([
|
||||
fs.createReadStream(file, {encoding: 'utf8'}),
|
||||
Pick.withParser({filter:type}),
|
||||
streamArray()
|
||||
])
|
||||
|
||||
let localcount = 0;
|
||||
let sent = 0;
|
||||
let chunk = []
|
||||
//Start a timer for fun
|
||||
console.time('IngestTime')
|
||||
|
||||
pipeline.on('data', async function(data){
|
||||
chunk.push(data.value)
|
||||
localcount++;
|
||||
|
||||
if (localcount % 100 === 0){
|
||||
pipeline.pause();
|
||||
await this.uploadDataNew(chunk, type)
|
||||
sent += chunk.length;
|
||||
this.setState({
|
||||
progress: Math.floor(sent / count * 100)
|
||||
});
|
||||
chunk = []
|
||||
pipeline.resume();
|
||||
}
|
||||
|
||||
}.bind(this)).on('end', async function(){
|
||||
await this.uploadDataNew(chunk, type)
|
||||
this.setState({progress:100});
|
||||
emitter.emit('refreshDBData');
|
||||
console.timeEnd('IngestTime');
|
||||
callback()
|
||||
}.bind(this))
|
||||
}
|
||||
|
||||
//DO NOT USE THIS FUNCTION FOR ANYTHING, ITS ONLY FOR TESTING
|
||||
sleep_test(ms) {
|
||||
return new Promise(resolve => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
async uploadDataNew(chunk, type){
|
||||
let session = driver.session();
|
||||
let funcMap = {'computers' : buildComputerJson, 'domains': buildDomainJson, 'gpos': buildGpoJson, 'users': buildUserJson,
|
||||
'groups': buildGroupJson, 'ous': buildOuJson, 'sessions': buildSessionJson, 'gpoadmins':buildGpoAdminJson}
|
||||
let data = funcMap[type](chunk)
|
||||
|
||||
for (let key in data){
|
||||
await session.run(data[key].statement, {props: data[key].props}).catch(function(error){
|
||||
console.log(error)
|
||||
})
|
||||
}
|
||||
|
||||
session.close();
|
||||
}
|
||||
|
||||
processFileOld(file, callback){
|
||||
console.log(file);
|
||||
var count = 0;
|
||||
var header = "";
|
||||
|
@ -207,6 +297,8 @@ export default class MenuContainer extends Component {
|
|||
}.bind(this));
|
||||
}
|
||||
|
||||
|
||||
|
||||
async uploadData(currentChunk, filetype, total){
|
||||
var index = 0;
|
||||
var processed;
|
||||
|
|
370
src/js/utils.js
370
src/js/utils.js
|
@ -584,6 +584,376 @@ export function buildGplinkProps(rows){
|
|||
return datadict;
|
||||
}
|
||||
|
||||
function processAceArray(array, objname, objtype, output){
    // Template with three `{}` slots: principal label, target label, edge type.
    let baseAceQuery = 'UNWIND {props} AS prop MERGE (a:{} {name:prop.principal}) MERGE (b:{} {name: prop.obj}) MERGE (a)-[r:{} {isacl:true}]->(b)'

    $.each(array, function(_, ace){
        let principal = ace.PrincipalName;
        let principaltype = ace.PrincipalType;
        let right = ace.RightName;
        let acetype = ace.AceType;

        // A principal with an ACE on itself adds no useful edge.
        if (principal === objname){
            return;
        }

        // Translate the raw right/type pair into the edge names to create.
        let edges = [];

        if (acetype === 'All'){
            edges.push('AllExtendedRights');
        } else if (acetype === 'User-Force-Change-Password'){
            edges.push('ForceChangePassword');
        } else if (acetype === 'Member'){
            edges.push('AddMember');
        } else if (right === 'ExtendedRight'){
            edges.push(acetype);
        }

        // These rights can be combined in a single RightName string.
        ['GenericAll', 'WriteDacl', 'WriteOwner', 'GenericWrite'].forEach(function(flag){
            if (right.includes(flag)){
                edges.push(flag);
            }
        });

        if (right === 'Owner'){
            edges.push('Owns');
        }

        $.each(edges, function(_, edge){
            let hash = edge + principaltype;
            let formatted = baseAceQuery.format(principaltype.toTitleCase(), objtype, edge);
            insert(output, hash, formatted, {principal: principal, obj: objname});
        });
    });
}
|
||||
|
||||
export function buildDomainJson(chunk){
    // Batched neo4j statements keyed by relationship kind; each entry holds
    // one Cypher statement plus the props rows to UNWIND into it.
    let queries = {
        properties: {
            statement: "UNWIND {props} AS prop MERGE (n:Domain {name:prop.name}) SET n += prop.map",
            props: []
        },
        links: {
            statement: 'UNWIND {props} as prop MERGE (n:Domain {name:prop.domain}) MERGE (m:GPO {name:prop.gpo}) MERGE (m)-[r:GpLink {enforced:prop.enforced, isacl:false}]->(n)',
            props: []
        },
        trusts: {
            statement: 'UNWIND {props} AS prop MERGE (n:Domain {name: prop.a}) MERGE (m:Domain {name: prop.b}) MERGE (n)-[:TrustedBy {trusttype : prop.trusttype, transitive: prop.transitive, isacl:false}]->(m)',
            props: []
        },
        childous: {
            statement: "UNWIND {props} AS prop MERGE (n:Domain {name:prop.domain}) MERGE (m:OU {guid:prop.guid}) MERGE (n)-[r:Contains]->(m)",
            props: []
        },
        computers: {
            statement: "UNWIND {props} AS prop MERGE (n:Domain {name:prop.domain}) MERGE (m:Computer {name:prop.comp}) MERGE (n)-[r:Contains]->(m)",
            props: []
        },
        users: {
            statement: "UNWIND {props} AS prop MERGE (n:Domain {name:prop.domain}) MERGE (m:User {name:prop.user}) MERGE (n)-[r:Contains]->(m)",
            props: []
        }
    };

    $.each(chunk, function(_, domain){
        let name = domain.Name;

        queries.properties.props.push({map: domain.Properties, name: name});

        // GPO links applied to this domain.
        $.each(domain.Links, function(_, link){
            queries.links.props.push({domain: name, gpo: link.Name, enforced: link.IsEnforced});
        });

        // TrustDirection 0 creates target->name, 1 creates name->target,
        // 2 creates an edge in both directions.
        $.each(domain.Trusts, function(_, trust){
            let target = trust.TargetName;
            let transitive = trust.IsTransitive;
            let type = trust.TrustType;

            switch (trust.TrustDirection){
                case 0:
                    queries.trusts.props.push({a: target, b: name, transitive: transitive, trusttype: type});
                    break;
                case 1:
                    queries.trusts.props.push({a: name, b: target, transitive: transitive, trusttype: type});
                    break;
                case 2:
                    queries.trusts.props.push({a: name, b: target, transitive: transitive, trusttype: type});
                    queries.trusts.props.push({a: target, b: name, transitive: transitive, trusttype: type});
                    break;
            }
        });

        processAceArray(domain.Aces, name, "Domain", queries);

        $.each(domain.ChildOus, function(_, ou){
            queries.childous.props.push({domain: name, guid: ou});
        });

        $.each(domain.Computers, function(_, computer){
            queries.computers.props.push({domain: name, comp: computer});
        });

        $.each(domain.Users, function(_, user){
            queries.users.props.push({domain: name, user: user});
        });
    });

    return queries;
}
|
||||
|
||||
export function buildGpoJson(chunk){
    // Single batched statement: create/refresh GPO nodes keyed by name,
    // stamping each with its guid. ACE edges are added via processAceArray.
    let queries = {
        properties: {
            statement: "UNWIND {props} AS prop MERGE (n:GPO {name:prop.name}) SET n.guid=prop.guid",
            props: []
        }
    };

    $.each(chunk, function(_, gpo){
        let name = gpo.Name;
        queries.properties.props.push({name: name, guid: gpo.Guid});
        processAceArray(gpo.Aces, name, "GPO", queries);
    });

    return queries;
}
|
||||
|
||||
export function buildGroupJson(chunk){
    let queries = {
        properties: {
            statement: "UNWIND {props} AS prop MERGE (n:Group {name:prop.name}) SET n += prop.map",
            props: []
        }
    };

    // The `{}` slot is filled with the member's node label, so each
    // member type gets its own batched statement (keyed by MemberType).
    let baseStatement = "UNWIND {props} AS prop MERGE (n:Group {name: prop.name}) MERGE (m:{} {name:prop.member}) MERGE (m)-[r:MemberOf]->(n)";

    $.each(chunk, function(_, group){
        let name = group.Name;

        queries.properties.props.push({map: group.Properties, name: name});

        processAceArray(group.Aces, name, "Group", queries);

        $.each(group.Members, function(_, member){
            let mtype = member.MemberType;
            let statement = baseStatement.format(mtype.toTitleCase());
            insert(queries, mtype, statement, {name: name, member: member.MemberName});
        });
    });

    return queries
}
|
||||
|
||||
export function buildOuJson(chunk){
    // Batched statements: OU properties plus Contains edges to child OUs,
    // computers, and users. OUs are keyed by guid rather than name.
    let queries = {
        properties: {
            statement: "UNWIND {props} AS prop MERGE (n:OU {guid:prop.guid}) SET n += prop.map",
            props: []
        },
        childous: {
            statement: "UNWIND {props} AS prop MERGE (n:OU {guid:prop.parent}) MERGE (m:OU {guid:prop.child}) MERGE (n)-[r:Contains]->(m)",
            props: []
        },
        computers: {
            statement: "UNWIND {props} AS prop MERGE (n:OU {guid:prop.ou}) MERGE (m:Computer {name:prop.comp}) MERGE (n)-[r:Contains]->(m)",
            props: []
        },
        users: {
            statement: "UNWIND {props} AS prop MERGE (n:OU {guid:prop.ou}) MERGE (m:User {name:prop.user}) MERGE (n)-[r:Contains]->(m)",
            props: []
        }
    };

    $.each(chunk, function(_, ou){
        let guid = ou.Guid;

        queries.properties.props.push({guid: guid, map: ou.Properties});

        $.each(ou.ChildOus, function(_, child){
            queries.childous.props.push({parent: guid, child: child});
        });

        $.each(ou.Computers, function(_, comp){
            queries.computers.props.push({ou: guid, comp: comp});
        });

        $.each(ou.Users, function(_, user){
            queries.users.props.push({ou: guid, user: user});
        });
    });

    return queries;
}
|
||||
|
||||
export function buildSessionJson(chunk){
    // One batched statement: weighted HasSession edges, computer -> user.
    let queries = {
        sessions: {
            statement: "UNWIND {props} AS prop MERGE (n:User {name:prop.user}) MERGE (m:Computer {name:prop.comp}) MERGE (m)-[r:HasSession {weight: prop.weight, isacl:false}]->(n)",
            props: []
        }
    };

    $.each(chunk, function(_, session){
        queries.sessions.props.push({
            user: session.UserName,
            comp: session.ComputerName,
            weight: session.Weight
        });
    });

    return queries;
}
|
||||
|
||||
export function buildGpoAdminJson(chunk){
    let queries = {};

    // The `{}` slot is filled with the admin principal's node label, so
    // each principal type gets its own batched statement (keyed by Type).
    let baseQuery = "UNWIND {props} AS prop MERGE (n:{} {name:prop.admin}) MERGE (m:Computer {name:prop.comp}) MERGE (n)-[r:AdminTo {isacl:false}]->(m)"

    $.each(chunk, function(_, gpoadmin){
        let type = gpoadmin.Type;
        let query = baseQuery.format(type.toTitleCase());
        insert(queries, type, query, {admin: gpoadmin.Name, comp: gpoadmin.Computer});
    });

    return queries;
}
|
||||
|
||||
export function buildUserJson(chunk){
    // Builds batched statements for user nodes (and their primary-group
    // MemberOf edges), keyed so uploadDataNew can UNWIND each batch.
    let queries = {};

    $.each(chunk, function(_, user){
        let name = user.Name;
        let properties = user.Properties;
        let primarygroup = user.PrimaryGroup;

        // Bug fix: the original picked ONE properties statement based on the
        // first user's PrimaryGroup. A mixed chunk then either silently
        // dropped MemberOf edges (first user had no pg) or tried to MERGE a
        // Group with a null name (first user had a pg). Split into two
        // buckets so each user gets the right statement.
        if (primarygroup === null){
            insert(queries, 'userprops',
                "UNWIND {props} AS prop MERGE (n:User {name:prop.name}) SET n += prop.map",
                {map: properties, name: name});
        } else {
            insert(queries, 'userpropspg',
                "UNWIND {props} AS prop MERGE (n:User {name:prop.name}) MERGE (m:Group {name:prop.pg}) MERGE (n)-[r:MemberOf {isacl:false}]->(m) SET n += prop.map",
                {map: properties, name: name, pg: primarygroup});
        }

        processAceArray(user.Aces, name, "User", queries);
    });
    return queries
}
|
||||
|
||||
export function buildComputerJson(chunk){
    let queries = {};
    // Two `{}` slots: principal label (from entry.Type) and edge type.
    let baseQuery = "UNWIND {props} AS prop MERGE (n:Computer {name:prop.name}) MERGE (m:{} {name:prop.target}) MERGE (m)-[r:{}]->(n)"

    $.each(chunk, function(_, comp){
        let name = comp.Name;
        let properties = comp.Properties;
        let primarygroup = comp.PrimaryGroup;

        // Bug fix: the original locked the properties statement to the FIRST
        // computer's PrimaryGroup. A mixed chunk then either dropped
        // MemberOf edges or MERGEd a Group with a null name. Split into two
        // buckets so each computer gets the right statement.
        if (primarygroup === null){
            insert(queries, 'compprops',
                "UNWIND {props} AS prop MERGE (n:Computer {name:prop.name}) SET n += prop.map",
                {map: properties, name: name});
        } else {
            insert(queries, 'comppropspg',
                "UNWIND {props} AS prop MERGE (n:Computer {name:prop.name}) MERGE (m:Group {name:prop.pg}) MERGE (n)-[r:MemberOf]->(m) SET n += prop.map",
                {map: properties, name: name, pg: primarygroup});
        }

        // Shared edge builder for the duplicated LocalAdmins/RDP loops.
        function addEdges(list, rel){
            $.each(list, function(_, entry){
                let statement = baseQuery.format(entry.Type, rel);
                insert(queries, rel + entry.Type, statement, {name: name, target: entry.Name});
            });
        }

        addEdges(comp.LocalAdmins, "AdminTo");
        addEdges(comp.RemoteDesktopUsers, "CanRDP");
    });
    return queries
}
|
||||
|
||||
function insert(obj, hash, statement, prop){
    // Append `prop` to the batch keyed by `hash`, creating the batch (and
    // recording its Cypher statement) on first use. An existing batch keeps
    // its original statement.
    if (!obj[hash]){
        obj[hash] = {statement: statement, props: []};
    }
    obj[hash].props.push(prop);
}
|
||||
|
||||
export function buildACLProps(rows) {
|
||||
var datadict = {};
|
||||
|
||||
|
|
Loading…
Reference in New Issue