Initial commit: Add Technopark scraper project

parent 968199528d
commit 04d32af29c

companies.csv (normal file, 507 lines)
@@ -0,0 +1,507 @@
|
||||
Company Name,Company Website
|
||||
"ALLIANZ SERVICES (P) LTD (FORMERLY KNOWN AS ALLIANZ CORNHILL INFORMATION SERVICES (P) LTD)","http://www.allianz.com/"
|
||||
"EYME TECHNOLOGIES (P) LTD","http://www.ey.com/"
|
||||
"H&R BLOCK INDIA (P) LTD","http://www.hrblock.com/"
|
||||
"IBS SOFTWARE (P) LTD","https://www.ibsplc.com/"
|
||||
"INFOSYS LTD","http://www.infosys.com/"
|
||||
"NISSAN DIGITAL INDIA LLP","http://www.nissandigitalindia.com/"
|
||||
"ORACLE INDIA","http://www.oracle.com/"
|
||||
"QUEST GLOBAL ENGINEERING SERVICES (P) LTD","http://www.quest-global.com/"
|
||||
"RM EDUCATION SOLUTIONS INDIA (P) LTD","http://www.rmesi.co.in/"
|
||||
"STACKGENIE CONSULTING (P) LTD","https://stackgenie.io/"
|
||||
"SUNTEC BUSINESS SOLUTIONS (P) LTD","http://www.suntecgroup.com/"
|
||||
"TATA CONSULTANCY SERVICES LTD","http://www.tcs.com/"
|
||||
"TATA ELXSI","http://www.tataelxsi.com/"
|
||||
"UST GLOBAL","http://www.ust.com/"
|
||||
"1074 VECTORS","http://www.1074vectors.com/"
|
||||
"3MBK TECHNOLOGIES (P) LTD","http://www.3mbk.com/"
|
||||
"4LABS TECHNOLOGIES (P) LTD","https://www.4labsinc.com/"
|
||||
"AABASOFT TECHNOLOGIES INDIA (P) LTD","http://www.aabasoft.com/"
|
||||
"AADILTECH SOLUTIONS PVT. LTD","https://aadiltech.com/"
|
||||
"AANEETA KPO (P) LTD","http://www.aaneeta.com/"
|
||||
"AARTISTUDE INNOVATIONS (P) LTD","http://artistude.com/"
|
||||
"ACCUBITS TECHNOLOGIES (P) LTD","http://www.accubits.com/"
|
||||
"ACHARYA PANCHAKARMA AND TRADITIONAL KERALA AYURVEDA PRIVATE LIMITED","https://socialwings.in/"
|
||||
"ACROCEDE TECHNOLOGIES (P) LTD","http://www.acrocede.com/"
|
||||
"ACSIA TECHNOLOGIES (P) LTD","http://www.acsiatech.com/"
|
||||
"ADDANT SYSTEMS (P) LTD","http://www.addant.com/"
|
||||
"ADISYS RESEARCH AND DEVELOPMENT INDIA (P) LTD","http://www.adisysindia.com/"
|
||||
"ADROITMINDS SOFTWARE LABS","http://www.adroitminds.com/"
|
||||
"ADVITECH TECHNOLOGIES (P) LTD","https://advitech.in/"
|
||||
"AEDGE AICC INDIA (P) LTD (ARMADA)","https://www.armada.ai/"
|
||||
"AEISTIR TECHNOLOGIES (P) LTD","http://www.aeistir.com/"
|
||||
"AINSURTECH (P) LTD","https://www.ainsurtek.com/"
|
||||
"AKIRA SOFTWARE SOLUTIONS (P) LTD","http://www.akiraplc.com/"
|
||||
"ALAMY IMAGES INDIA (P) LTD","http://www.alamy.com/"
|
||||
"ALLIANZ SERVICES (P) LTD (FORMERLY KNOWN AS ALLIANZ CORNHILL INFORMATION SERVICES (P) LTD)","http://www.allianz.com/"
|
||||
"ALLIANZ TECHNOLOGY SE","https://tech.allianz.com/en.html"
|
||||
"ALOKIN SOFTWARE (P) LTD","http://www.alokin.in/"
|
||||
"ALP TURNKEY SOLUTIONS (P) LTD","https://www.alp-ts.com/"
|
||||
"ALTAIRE INSIGHTS TECHNOLOGIES (P) LTD","https://www.altaire-insights.com/"
|
||||
"ALZONE SOFTWARE (P) LTD","http://www.alzonesoftware.com/"
|
||||
"AMEDISOFT TECHNOLOGIES (P) LTD","http://www.amedisoft.com/"
|
||||
"AMSTOR CONSULTANCY SERVICES (P) LTD","https://www.amstortech.com/"
|
||||
"AMSTOR INFORMATION TECHNOLOGY INDIA (P) LTD","http://www.amstorglobal.com/"
|
||||
"ANALYSE DIGITAL BUSINESS SOLUTIONS (P) LTD","http://www.analysedigital.com/"
|
||||
"ANALYSTOR TECHNOLOGIES PVT .LTD","https://www.analystortech.com/"
|
||||
"AOT TECHNOLOGIES (P) LTD","http://www.aot-technologies.com/"
|
||||
"APPFABS INNOVATION (P) LTD","https://www.appfabs.ca/"
|
||||
"APPLEXUS TECHNOLOGIES (P) LTD","http://www.applexus.com/"
|
||||
"APPS TEAM TECHNOLOGIES PVT LTD","https://appsteamtechnologies.com/"
|
||||
"APPSIOLOGIX BUSINESS SOLUTIONS (P) LTD","https://www.kerala.gov.in/"
|
||||
"APPSTATION (P) LTD","http://www.appstation.in/"
|
||||
"APTARA LEARNING (P) LTD","http://www.aptaracorp.com/"
|
||||
"ARCHEMEY DIGITAL PRIVATE LIMITED","https://archemey.com/"
|
||||
"ARGENT SOFTWARE INC","http://www.argent.com/"
|
||||
"ARS TRAFFIC & TRANSPORT TECHNOLOGY INDIA (P) LTD","http://www.ars-traffic.com/"
|
||||
"ARTEMAS TECHNOLOGIES (P) LTD","http://www.artemasdigital.com/"
|
||||
"ASIANET SATELLITE COMMUNICATIONS LTD","https://www.kerala.gov.in/"
|
||||
"ASSURETECH BUSINESS SOLUTIONS (P) LTD","http://www.assuretech.in/"
|
||||
"ATEAM INFOSOFT SOLUTIONS PRIVATE LIMITED","https://www.ateamsoftsolutions.com/"
|
||||
"ATEN INDIA SOFTWARE SOLUTIONS (P) LTD","http://www.ateninc.com/"
|
||||
"ATSERV TECHNOLOGIES INDIA (P) LTD","https://go.trader.ca/"
|
||||
"AURIONPRO SOLUTIONS LTD","https://www.aurionpro.com/"
|
||||
"AUTRAM INFOTECH (P) LTD","http://www.autram.com/"
|
||||
"AVERNA TEST INDIA (P) LTD","http://www.averna.com/"
|
||||
"AWDAR SOFTWARE SOLUTIONS LLP","https://ainexit.com/"
|
||||
"AXEN SOFTWARE (P) LTD","http://www.axensoft.com/"
|
||||
"AZINOVA TECHNOLOGIES (P) LTD","http://www.azinovatechnologies.com/"
|
||||
"AZIZ INDUSTIRES CORPORATION","https://www.aicorp.co.in/"
|
||||
"BEAGLE CYBER INNOVATIONS (P) LTD","https://beaglesecurity.com/"
|
||||
"BETTERBEANS CONSULTANTS (P) LTD","https://betterbeans.in/"
|
||||
"BEYONGEN ACUMEN (P) LTD","https://thebga.io/"
|
||||
"BILAGOS SOFTWARES (P) LTD","http://www.bilagos.com/"
|
||||
"BIMAGENIE INSURANCE BROKING PVT. LTD","https://www.bimagenie.com/"
|
||||
"BIMLABS STUDIO PVT LTD","https://bimstudio.net/"
|
||||
"BITRYT SOLUTIONS (P) LTD","http://www.bitryt.com/"
|
||||
"BITSNPIXS TECHNOLOGIES (P) LTD","https://www.bitsnpixs.com/"
|
||||
"BOXOP SOLUTIONS INDIA (P) LTD","http://www.boxop.in/"
|
||||
"BRANDFELL TECHNOLOGIES (P) LTD","http://www.brandfell.com/"
|
||||
"BREEZE BRAIN TECHNOLOGIES (P) LTD","https://www.breezebrain.com/"
|
||||
"BST SOFTWARE (P) LTD","http://bstsoftwarelabs.in/"
|
||||
"BTREE IOT TECHNOLOGIES (P) LTD","https://www.btree.in/"
|
||||
"BUILDNEXT CONSTRUCTION SOLUTIONS (P) LTD","http://www.buildnext.in/"
|
||||
"BYTEWAVE DIGITAL CONSULTANTS (P) LTD","http://www.bytewavedigital.com/"
|
||||
"CALMOVE TECHNOLOGIES (P) LTD","http://www.calmove.com/"
|
||||
"CANKADO INDIA (P) LTD","http://www.cankado.com/"
|
||||
"CANZA TECHNOLOGY CONSULTANTS (P) LTD","https://www.canzatech.com/"
|
||||
"CARMIC TECH SOLUTIONS (P) LTD","http://www.carmictech.com/"
|
||||
"CARNIVAL TECHNOPARK (P) LTD","http://www.groupcarnival.com/"
|
||||
"CASTEL TECHNOLOGIES (P) LTD","http://www.casteltech.com/"
|
||||
"CATALYCA (P) LTD","https://catalyca.com/"
|
||||
"CENTELON CONSULTANCY SERVICES (P) LTD","https://www.centelonsolutions.com/"
|
||||
"CENTRE FOR DEVELOPMENT OF ADVANCED COMPUTING [CDAC]","http://www.cdac.in/"
|
||||
"CEPTIGEN TECHNOLOGIES (P) LTD","http://www.ceptigen.com/"
|
||||
"CHORDIFY TECH INDIA (P) LTD","https://chordify.com/"
|
||||
"CIBER DIGITA CONSULTANTS LLP","http://www.cdcllp.com/"
|
||||
"CINCH BUSINESS SOLUTIONS (P) LTD","http://www.cinchplc.com/"
|
||||
"CLEAREYE.AI (P) LTD (FORMERLY KNOWN AS AIWARE TECHNOLOGY SYSTEMS (P) LTD)","http://www.cleareye.ai/"
|
||||
"CLINIPACE CLINICAL RESEARCH (P) LTD","https://www.caidya.com/"
|
||||
"CLOUD NAUTICAL SOLUTIONS INDIA PVT LTD","https://www.cloudnautical.com/"
|
||||
"CLOUDPLUS INFORMATION TECHNOLOGIES PVT LTD","http://www.cloudplusinfotech.com/"
|
||||
"CLOUDQ IT SERVICES (P) LTD","http://www.cloudq.net/"
|
||||
"CODEOFDUTY INNOVATIONS (P) LTD","https://www.codeofduty.in/"
|
||||
"COGNOSIN (P) LTD","http://www.cognosin.com/"
|
||||
"COLLABORATIVE LEARNING LABS (P) LTD","http://www.collabll.com/"
|
||||
"CONFIANZ INFORMATION TECHNOLOGIES (P) LTD","https://www.confianzit.com/"
|
||||
"CONSCIENCE BUSINESS SOLUTIONS (P) LTD","http://www.conscience.co.in/"
|
||||
"CRAYO TECH BUSINESS SOLUTIONS (P) LTD","http://www.crayotech.com/india"
|
||||
"CREACE TECHNOLOGIES (P) LTD","http://www.creace.in/"
|
||||
"CYBMIRROR INNOVATIONS LLP","http://www.cybmirror.com/"
|
||||
"DAIVIKSOFT TECHNOLOGIES PRIVATE LIMITED","https://www.daiviksoft.com/"
|
||||
"DATARAYS SOLUTIONS (P) LTD","https://www.datarays.in/"
|
||||
"DCUBE AI SYSTEMS & SERVICES (P) LTD","http://www.dcubeai.com/"
|
||||
"DE'ANTZ TECHNOLOGICAL SOLUTIONS (P) LTD","http://www.deantz.com/"
|
||||
"DEVICEDRIVEN INDIA (P) LTD","http://www.devicedriven.com/"
|
||||
"DEZYNEX TECHNOLOGY SOLUTIONS (P) LTD","https://www.dezynex.com/"
|
||||
"DIAGNAL TECHNOLOGIES (P) LTD","https://www.diagnal.com/"
|
||||
"DIGINEST SOLUTIONS (P) LTD","https://www.diginestsolutions.com/"
|
||||
"DOCKING STATION TECHNOLOGIES (P) LTD","http://www.dockingstation.co.in/"
|
||||
"DOUBTBOX EDUTAINMENT (P) LTD","http://www.doubtbox.com/"
|
||||
"DSI TECHNOLOGIES (P) LTD","http://www.dsitechnologies.co.in/"
|
||||
"ECESIS CARE (P) LTD","https://www.ecesistech.com/"
|
||||
"ECOWHITE ARCHITECTURAL ENGINEERING & TECHNOLOGY SERVICES (P) LTD","http://www.ecowhite.co.in/"
|
||||
"ECS BUSINESS SOLUTIONS (P) LTD","https://www.ecsfin.com/"
|
||||
"EDGAADI (P) LTD","https://www.edgaadi.com/"
|
||||
"EDIFY DATASCIENCE (P) LTD","https://edifydata.com/"
|
||||
"EDWIZA SOLUTIONS (P) LTD","http://www.edwiza.com/"
|
||||
"ELECTRONICS TECHNOLOGY PARKS KERALA (TECHNOPARK)","https://technopark.in/"
|
||||
"ELEMENTZ ENGINEERS GUILD (P) LTD","http://www.elementzonline.com/"
|
||||
"ELEMENTZ IT SOLUTIONS (P) LTD","https://www.elementzit.com/"
|
||||
"ELK STORE","http://www.myelkstore.com/"
|
||||
"ELSYS INTELLIGENT DEVICES (P) LTD","http://www.elsyslabs.com/"
|
||||
"EMBEDDED RESEARCH & INNOVATION SOFTWARE LABS (P) LTD","http://www.erislabs.com/"
|
||||
"EMBRIGHT INFOTECH (P) LTD","http://www.embrightinfotech.com/"
|
||||
"EMIN SOFT TECHNOLOGIES (P) LTD","http://www.eminsoftech.com/"
|
||||
"ENFIN TECHNOLOGIES INDIA (P) LTD","http://www.enfintechnologies.com/"
|
||||
"ENTIRESOFT TECHNOLOGIES (P) LTD","http://www.entiresofts.com/"
|
||||
"ENTREGAR SOLUTIONS PVT LIMITED","http://www.entregarsolutions.com/"
|
||||
"ENTRYKEY BUSINESS SOLUTION LLP","https://entrykey.in/"
|
||||
"ENTUITE TECHNOLOGIES (P) LTD","https://entuite.com/"
|
||||
"ENVESTNET ASSET MANAGEMENT INDIA (P) LTD","http://www.envestnet.com/"
|
||||
"EPI FRACTALS BIOSYSTEMS (P) LTD (EFBS)","https://www.epifractals.com/"
|
||||
"ESTRRADO TECHNOLOGY SOLUTIONS (P) LTD","http://www.estrrado.com/"
|
||||
"EUROSIA BIM SERVICES (P) LTD","http://www.eurosia.eu/"
|
||||
"EVALOGICAL (P) LTD","http://www.evalogical.com/"
|
||||
"EXPEREAL TECHNOLOGIES (P) LTD","https://expereal.org/"
|
||||
"EXPERION TECHNOLOGIES (I) (P) LTD","http://www.experionglobal.com/"
|
||||
"EXPEVO DIGITAL SERVICES LLP","https://www.expevo.com/"
|
||||
"EXPONENTIAL DIGITAL SOLUTIONS (P) LTD","http://www.10xds.com/"
|
||||
"EXPRESSARE TECHNOLOGIES (P) LTD","https://expressare.com/"
|
||||
"F12 TECHNOLOGIES (P) LTD","http://www.f12technologies.com/"
|
||||
"FAITH INFOTECH INDIA (P) LTD","https://www.faithinfotechacademy.com/"
|
||||
"FAKEEH TECHNOLOGIES (P) LTD","https://www.fakeeh.tech/"
|
||||
"FAYA INNOVATIONS","http://www.fayausa.com/"
|
||||
"FEATHERSOFT INFO SOLUTIONS (P) LTD","https://www.feathersoft.com/"
|
||||
"FERNS IT SOLUTIONS (P) LTD","https://ferns-it.com/"
|
||||
"FIELDNXT SERVICE SOLUTIONS (P) LTD","http://www.fieldnxt.com/"
|
||||
"FINASTRA (D+H SOLUTIONS INDIA (P) LTD)","http://www.finastra.com/"
|
||||
"FINGENT TECHNOLOGY SOLUTIONS (P) LTD","http://www.fingent.com/"
|
||||
"FISCHER SYSTEMS INDIA (P) LTD","http://www.fisc.com/"
|
||||
"FLYTXT MOBILE SOLUTIONS (P) LTD","https://flytxt.ai/"
|
||||
"FOUNDINGMINDS SOFTWARE (P) LTD","http://www.foundingminds.com/"
|
||||
"FOXDALE (P) LTD","http://www.foxdale.in/"
|
||||
"FRESH MIND IDEAS (P) LTD","http://www.freshmindideas.com/"
|
||||
"FUTURE CYBER TECH (P) LTD","https://futurecybertek.com/"
|
||||
"FUTUREBLOX TECHNOLOGIES (P) LTD","https://www.futureblox.com/"
|
||||
"FXMEDIALAB","http://www.fxmedialab.com/"
|
||||
"FYNAMICS BUSINESS SOLUTIONS (P) LTD","http://www.finline.in/"
|
||||
"G D INNOVATIVE SOLUTIONS (P) LTD","http://www.gdinnovativesolutions.com/"
|
||||
"GAIN INFOSYSTEMS (P) LTD","https://www.gaininfosystems.com/"
|
||||
"GAUDE BUSINESS SOLUTIONS (P) LTD","http://www.gaudesolutions.com/"
|
||||
"GEMINI SOFTWARE SOLUTIONS (P) LTD","http://www.geminisoftware.com/"
|
||||
"GENPRO RESEARCH (P) LTD","https://genproresearch.com/"
|
||||
"GENROBOTIC INNOVATIONS (P) LTD","http://www.genrobotics.org/"
|
||||
"GESCIS TECHNOLOGIES (P) LTD","https://gescis.com/"
|
||||
"GLOBCOM SOLUTIONS (P) LTD","http://www.globcomsolution.com/"
|
||||
"GLOBIFY SOFTWARE SOLUTIONS (P) LTD","https://globify.in/"
|
||||
"GNX DIGITAL SOLUTIONS (P) LTD","https://gnxsolutions.in/"
|
||||
"GOOD METHODS SOFTWARE SOLUTIONS (P) LTD","http://www.carestack.com/"
|
||||
"GREEN ORCHID SOFTWARE SOLUTIONS (P) LTD","http://www.greenorchidplc.com/"
|
||||
"GRID ENGINEERING SERVICES","https://gridengineeringservices.com/"
|
||||
"GROUP OF TECHNOLOGY COMPANIES (GTECH)","http://www.gtechindia.org/"
|
||||
"GROZEO INTERNATIONAL (P) LTD","https://www.grozeo.com/"
|
||||
"GRUPPOZENIT INDIA (P) LTD","http://www.gruppozenit.com/"
|
||||
"GUIDEHOUSE INDIA (P) LTD","http://www.guidehouse.com/regions/india"
|
||||
"HASPACES IT & HOSPITALITY (P) LTD","https://www.haspaces.com/"
|
||||
"HAWK SPARKS DESIGN STUDIO LLP","https://www.hawksparks.in/"
|
||||
"HEPSOFT TECHNOLOGIES (P) LTD","http://www.hepsoft.com/"
|
||||
"HEX20LABS INDIA (P) LTD","https://www.hex20.space/"
|
||||
"HIWORTH SOLUTIONS (P) LTD","http://www.hiworthsolutions.com/"
|
||||
"HODO MEDICAL INFORMATIC SOLUTIONS (P) LTD","https://hodo.in/"
|
||||
"HONEYKODE TECHNOLOGIES (P) LTD","http://www.honeykode.com/"
|
||||
"HORNBILL LABS (P) LTD","https://www.hornbilllabs.com/"
|
||||
"HOSTDIME DATA CENTRE SERVICES (P) LTD","http://hostdime.in/"
|
||||
"HYBCLOUD TECHNOLOGIES (P) LTD (CLOUD CONTROL)","http://www.ecloudcontrol.com/"
|
||||
"HYZ VENTURES INTL (P) LTD","http://www.hyzventures.com/"
|
||||
"I-WORXS BY PALNAR","https://iworxs.de/de"
|
||||
"IANTZ IT SOLUTIONS (P) LTD","http://www.iantz.in/"
|
||||
"IBIL SOLUTIONS (P) LTD","https://www.ibilglobal.com/"
|
||||
"IBOSON INNOVATIONS (P) LTD","http://www.iboson.io/"
|
||||
"ICLOUD9 DIGITAL (P) LTD","http://www.icloud9digital.com/"
|
||||
"ICON CLINICAL RESEARCH INDIA (P) LTD","https://www.iconplc.com/"
|
||||
"ICORE PIONEER BUSINESS SOLUTION (P) LTD","https://icorepioneer.com/"
|
||||
"ICT ACADEMY OF KERALA","http://www.ictkerala.org/"
|
||||
"IDYNAMICS LTD","http://www.idynamics.com/"
|
||||
"IGNOSI ENTERPRISES","http://www.ignosi.in/"
|
||||
"IIC WEB SOLUTIONS (P) LTD","http://www.iicwebsolutions.com/"
|
||||
"ILE ILEARNINGENGINES INDIA (P) LTD","http://www.ilearningengines.com/"
|
||||
"INAPP INFORMATION TECHNOLOGIES INDIA (P) LTD","http://www.inapp.com/"
|
||||
"INAPP INFORMATION TECHNOLOGIES INDIA (P) LTD","http://www.inapp.com/"
|
||||
"INCREDIBLE VISIBILITY SOLUTIONS PRIVATE LIMITED","http://www.incrediblevisibility.com/"
|
||||
"INDIAN INSTITUTE OF INFORMATION TECHNOLOGY AND MANAGEMENT - KERALA (IIITM-K)","http://www.iiitmk.ac.in/"
|
||||
"INDUSTRON NANOTECHNOLOGY (P) LTD","http://www.industronnano.com/"
|
||||
"INEXONSOFT TECHNOLOGIES (P) LTD","https://www.inexonsoft.com/"
|
||||
"INFOBLOX TECHNICAL SUPPORT AND SOFTWARE DEVELOPMENT (P) LTD","http://www.infoblox.com/"
|
||||
"INFOFORTE (P) LTD","https://infoforte.com/"
|
||||
"INFOSPICA CONSULTANCY SERVICES","http://www.infospica.com/"
|
||||
"INFYNITH TECHNOMISSION (P) LTD","http://www.infynith.com/"
|
||||
"INNONEUR IT VENTURES (P) LTD","http://www.innoneur.com/"
|
||||
"INNOVAL DIGITAL SOLUTIONS (P) LTD (IVL)","http://www.ivldsp.com/"
|
||||
"INNOVATION INCUBATOR ADVISORY (P) LTD","https://innovationincubator.com/"
|
||||
"INNOVATISE TECHNOLOGY (P) LTD","http://www.innovatise.com/"
|
||||
"INNURE INFOTECH (P) LTD","http://www.innure.com/"
|
||||
"INOMETRICS TECHNOLOGY SYSTEMS (P) LTD","http://www.inometrics.com/"
|
||||
"INOVACE DIGITAL MEDIA LLP","http://www.inovace.in/"
|
||||
"INSPIRISYS SOLUTIONS LTD","http://www.inspirisys.com/"
|
||||
"INTEGRATED ORDER AUTOMATED SOLUTIONS (P) LTD","http://www.ioas.in/"
|
||||
"INTELLA ECOM IT SOLUTION (P) LTD","http://www.intellaecomits.com/"
|
||||
"INTERLAND TECHNOLOGY SERVICES (P) LTD","http://www.interlandtech.com/"
|
||||
"INTERNATIONAL VIRTUAL ASSISTANCE (P) LTD","http://www.ivasystmes.com/"
|
||||
"INTRIAD INNOVATIONS (P) LTD","http://www.intriadinnovations.com/"
|
||||
"IQMATRIX INFOWAYS SOLUTIONS (P) LTD","http://www.iqmatrix-solutions.com/"
|
||||
"ITRAITZ IT SOLUTIONS (P) LTD","http://www.itraitz.com/"
|
||||
"ITWAGON (P) LTD","https://www.itwagon.com/"
|
||||
"JOOPIUM TECHNOLOGIES (P) LTD","https://joopiumtechnologies.com/"
|
||||
"JUNIORDEC25 (P) LTD","https://www.juniordec25.com/"
|
||||
"KAISEMI CONTROL SYSTEMS (P) LTD","https://kaisemicontrolsystems.com/"
|
||||
"KAMEDA INFOLOGICS (P) LTD","http://www.kamedainfologics.com/"
|
||||
"KAWIKA TECHNOLOGIES (P) LTD","http://www.kawikatechnologies.com/"
|
||||
"KBS TECHNOLOGIES (P) LTD","http://www.kbstechsolutions.com/"
|
||||
"KEFI TECH SOLUTIONS (P) LTD","http://www.kefitech.com/"
|
||||
"KENLAND IT SOLUTIONS (P) LTD","https://www.kenland.in/"
|
||||
"KENNEDYS KOGNITIVE COMPUTING (P) LTD","http://www.kognitivecomputing.com/"
|
||||
"KERALA START-UP MISSION","http://www.startupmission.kerala.gov.in/"
|
||||
"KERALA STATE DATA CENTER","http://www.itmission.kerala.gov.in/"
|
||||
"KERALA UNIVERSITY OF DIGITAL SCIENCES, INNOVATION AND TECHNOLOGY","https://duk.ac.in/"
|
||||
"KEYVALUE SOFTWARE SYSTEMS (P) LTD","https://www.keyvalue.systems/"
|
||||
"KLYSTRON TECHNOLOGIES","https://www.kerala.gov.in/"
|
||||
"KNITT TECHNOLOGIES (P) LTD","https://www.knittglobal.com/"
|
||||
"KRISHNANS EDUTECH LLP","https://www.kerala.gov.in/"
|
||||
"KVALITETA SYSTEMS AND SOLUTIONS (P) LTD","http://www.kvaliteta.in/"
|
||||
"LABGEX INNOVATIONS (P) LTD","https://www.labgex.com/"
|
||||
"LAJU SOFTECH (P) LTD","http://www.lajusoftech.com/"
|
||||
"LAMBDAZEN INDIA (P) LTD","http://www.lzindia.co.in/"
|
||||
"LAW QUBE TECHNOLOGIES (P) LTD","http://www.lawqube.com/"
|
||||
"LEADER IT (P) LTD","http://www.leaderit.in/"
|
||||
"LEAN TRANSITIONS SOLUTIONS","http://www.leantransitionsolutions.com/"
|
||||
"LEKTIK CONSULTING","http://www.lektik.com/"
|
||||
"LETS BUILD (OPC) (P) LTD","https://letsbuild.co/"
|
||||
"LIFE AND OPPORTUNITY SOLUTIONS LLP","https://www.lifeandopportunity.com/"
|
||||
"LIGHTRAINS TECHNOLABS (P) LTD","http://www.lightrains.com/"
|
||||
"LITMUS7 SYSTEMS CONSULTING (P) LTD","http://www.litmus7.com/"
|
||||
"LIVARES TECHNOLOGIES","http://www.livares.com/"
|
||||
"LIVELOCAL E-COMM (P) LTD","https://www.lilo.co.in/"
|
||||
"LIVEPRO ACESOFT PVT LTD","https://www.acesoft.ca/"
|
||||
"LOGIDOTS TECHNOLOGIES (P) LTD","http://www.logidots.com/"
|
||||
"LUMENZE DIGITAL (P) LTD","https://lumenze.com/"
|
||||
"LUMICEL TECHNOLOGIES LLP","http://www.lumicelstudios.com/"
|
||||
"M SQUARED SOFTWARE & SERVICES (P) LTD","http://www.m2india.com/"
|
||||
"M2I","http://www.m2infotek.com/"
|
||||
"MACROSOFT IT SOLUTIONS INDIA (P) LTD","http://www.macrosoftindia.com/"
|
||||
"MAGNICODE SOLUTIONS (P) LTD","http://www.magnificentcode.com/"
|
||||
"MAGNORY (P) LTD","https://www.magnory.com/"
|
||||
"MAGPII INNOVATIONS PRIVATE LIMITED","http://www.magpiitech.com/"
|
||||
"MARVELLOUX APPS (P) LTD","http://www.marvelloux.com/"
|
||||
"MCFADYEN CONSULTING SOFTWARE INDIA (P) LTD","http://www.mcfadyen.com/"
|
||||
"MECHDRIVE INTL (P) LTD","https://www.mechdrive.in/"
|
||||
"MEDISCRIBES INFOTECH (INDIA) LLP","http://www.mediscribes.com/"
|
||||
"MEDISCRIBES INFOTECH INDIA LLP","http://www.mediscribes.com/"
|
||||
"MEDOWA GLOBAL (P) LTD","http://www.medowaglobal.com/"
|
||||
"MERGIOUS IT SOLUTION (P) LTD","http://www.mergious.com/"
|
||||
"METTLE NETWORKS (P) LTD","http://www.mettlenetworks.com/"
|
||||
"MIBIZ CYBER FORENSICS LABORATORY","http://www.mibizsys.com/"
|
||||
"MINDLOGUE TECHNOLOGIES LTD","http://www.mindlogue.ltd/"
|
||||
"MINERVA SPECIAL PURPOSE VEHICLE (P) LTD","https://www.mspv.in/"
|
||||
"MOBATIA TECHNOLOGY (P) LTD","http://www.mobatia.com/"
|
||||
"MOONHIVE (P) LTD","https://moonhive.in/"
|
||||
"MOVYTECH INNOVATIONS (P) LTD","https://movytech.co/"
|
||||
"MUTHOOT MERCANTILE LTD-IT WING","http://www.muthootenterprises.com/"
|
||||
"MUTHOOT PAPPACHAN TECHNOLOGIES LTD","https://www.mptglobal.com/"
|
||||
"MYTSYS SOFTWARE SOLUTIONS (P) LTD","http://www.mytsys.com/"
|
||||
"NAGA INFO SOLUTIONS (P) LTD","http://www.nagainfo.com/"
|
||||
"NASSCOM","https://nasscom.in/"
|
||||
"NAVIGANT BPM INDIA (P) LTD","http://www.navigant.com/"
|
||||
"NEEDSTREET WEB TECHNOLOGIES (P) LTD","http://www.continuouscare.io/"
|
||||
"NEONICZ SOFTWARE SOLUTIONS (P) LTD","http://www.neonicz.com/"
|
||||
"NERVE DIGITAL P LTD.","https://www.nervedigital.net/"
|
||||
"NETCOM SERVICES (P) LTD","https://www.netcomitservices.com/"
|
||||
"NETPRO MEDIA ADVERTISING (P) LTD","http://www.netpromediaindia.com/"
|
||||
"NETROXE IT SOLUTIONS (P) LTD","http://www.netroxe.com/"
|
||||
"NETWALK TECHNOLOGIES INDIA (P) LTD","https://netwalk.de/"
|
||||
"NEUROPLEX (P) LTD","http://www.neuroplex.in/"
|
||||
"NEXO MIRA TECHNOLOGIES (P) LTD","https://nexomira.com/"
|
||||
"NGXP TECHNOLOGIES LLP","https://ngxptechnologies.com/"
|
||||
"NILASOFT (P) LTD","https://nila-soft.com/"
|
||||
"NIRAST SOLUTIONS (P) LTD","http://www.nirasystems.com/"
|
||||
"NKORR TECHNOLOGIES (P) LTD","http://www.nkorrtech.com/"
|
||||
"NOON TECHNOLOGIES (P) LTD","http://www.noontechnologies.com/"
|
||||
"NORATEL INDIA POWER COMPONENTS (P) LTD","http://www.noratel.com/"
|
||||
"NUMENOR ENTERPRISES PRIVATE LIMITED","http://www.numenor.ltd/"
|
||||
"NXTGENIX SOLUTIONS (P) LTD","https://www.nxtgenix.design/"
|
||||
"NXTLIVE TECHNOLOGIES (P) LTD","http://www.nxtlive.com/"
|
||||
"OBERON GLOBAL SOLUTIONS (P) LTD","https://www.oberonglobalsolutions.com/"
|
||||
"ONETIKK CONSALTANTS (P) LTD","https://www.onetikk.org/"
|
||||
"OPTIOLOGIC TECHNOLOGIES (P) LTD","http://www.optiologic.com/"
|
||||
"ORCAPOD CONSULTING SERVICES PVT.LTD","https://universalmarinemedical.com/"
|
||||
"ORDERSTACK (TNGONE SOFTWARE LABS (P) LTD)","http://www.orderstack.io/"
|
||||
"ORIENT CDS PRIVATE LIMITED","https://www.orientcds.com/"
|
||||
"ORISYSINDIA CONSULTANCY SERVICES (P) LTD","http://www.orisys.in/"
|
||||
"OSPYN TECHNOLOGIES LTD","http://www.ospyn.com/"
|
||||
"OZTERN TECHNOLOGY (P) LTD","http://www.oztern.com/"
|
||||
"PACIFIC BPO (P) LTD","https://www.pacificbpo.com/"
|
||||
"PAPERCHASE ACCOUNTANCY INDIA PVT LTD","https://www.paperchase.ac/"
|
||||
"PARANOIA SYSTEMS INTERNATIONAL (P) LTD","http://www.paranoiagroup.com/"
|
||||
"PEARLSOFT TECHNOLOGIES LLP","https://pearlsofttechnologies.com/"
|
||||
"PEMS DIGITAL TECHNOLOGIES (P) LTD","https://www.pemsdigital.com/"
|
||||
"PERFOMATIX SOLUTIONS (P) LTD","http://www.perfomatix.com/"
|
||||
"PHASEAMP LABS (P) LTD","https://www.phaseamplabs.com/"
|
||||
"PHYKON SOLUTIONS (P) LTD","http://www.phykon.com/"
|
||||
"PI-DIGI LOGICAL SOLUTIONS (P) LTD","http://www.pi-digi.com/"
|
||||
"PICK2HEAL LAB LLP","http://www.pick2heal.com/"
|
||||
"PICKY ASSIST (P) LTD","https://www.pickyassist.com/"
|
||||
"PIT SOLUTIONS (P) LTD","http://www.pitsolutions.com/"
|
||||
"PIT SOLUTIONS (P) LTD","http://www.pitsolutions.com/"
|
||||
"PLANNET TECHNEST SOUTH INDIA (P) LTD","https://www.house-of-communication.com/de/en/brands/plan-net.html"
|
||||
"POLUS SOLUTIONS (P) LTD","https://www.polussolutions.com/"
|
||||
"PRAYAN ANIMATION STUDIO (P) LTD","http://www.prayananimation.com/"
|
||||
"PRAYAN INFOTECH (P) LTD","http://www.prayaninfotech.in/"
|
||||
"PRESS GANEY ASSOCIATES INDIA (P) LTD","https://www.pressganey.com/"
|
||||
"PROCHANT INDIA (P) LTD","http://www.prochant.com/"
|
||||
"PROMPTTECH GLOBAL (P) LTD","https://prompttechsolutions.com/"
|
||||
"PROPHAZE TECHNOLOGIES (P) LTD","https://prophaze.com/"
|
||||
"PROPMIX.IO INDIA (P) LTD","https://propmix.io/"
|
||||
"PROTIUM LOGIC IT SERVICES (P) LTD","http://www.protiumlogic.com/"
|
||||
"Q-DEVENGINE (P) LTD","http://www.q-devengine.com/"
|
||||
"QAWEBPRINTS INFOCORP","http://www.qawebprints.com/"
|
||||
"QBURST TECHNOLOGIES (P) LTD","http://www.qburst.com/"
|
||||
"QBURST TECHNOLOGIES (P) LTD","http://www.qburst.com/"
|
||||
"QMSMART TECHNOLOGIES (P) LTD","http://qmsmart.net/"
|
||||
"QUANTIPHI ANALYTICS SOLUTIONS PRIVATE LIMITED","https://quantiphi.com/"
|
||||
"QUIITS TECHNOLOGIES (P) LTD","http://www.quiits.com/"
|
||||
"QUSOL INNOVATIONS INDIA (P) LTD","https://qusolindia.com/"
|
||||
"QWORKS TECHNOLOGIES (P) LTD","http://www.qworks.co/"
|
||||
"QWY SOFTWARE (P) LTD","https://www.kerala.gov.in/"
|
||||
"QWY TECHNOLOGIES (P) LTD","https://qwqer.in/"
|
||||
"REALEFFECX TECHNOLABS LLP","http://www.realeffecx.com/"
|
||||
"REDEFINE (DIVISION OF DNEG INDIA MEDIA SERVICES LTD)","https://redefine.co/"
|
||||
"REFINED IT SOLUTIONS AND MARKETING (P) LTD","http://www.refined.co.in/"
|
||||
"REFLECTIONS INFO SYSTEMS (P) LTD","http://www.reflectionsglobal.com/"
|
||||
"REIZEND (P) LTD","http://www.reizendretail.in/"
|
||||
"REIZEND IT CONSULTANTS","http://www.reizend.in/"
|
||||
"RELAXPLZZ TECHNOLOGIES","https://relaxplzz.com/"
|
||||
"RESBEE INFO TECHNOLOGIES (P) LTD","https://ekhool.com/"
|
||||
"RESFEBER INFOSOLUTIONS (P) LTD","http://www.travelspoc.com/"
|
||||
"RESONANCE ENGINEERING (P) LTD","http://www.resonanceengineering.com/"
|
||||
"REVYRIE GLOBAL","http://www.revyrieglobal.com/"
|
||||
"REWARDS MYSEARCHGLOBAL (P) LTD","http://www.mysearchglobalrewards.com/"
|
||||
"RICHINNOVATIONS TECHNOLOGIES (P) LTD","https://www.richinnovationsplc.com/"
|
||||
"RICHKEN MEDIA (P) LTD","https://richkenmedia.com/"
|
||||
"RIVERSWAVE TECHNOLOGIES (P) LTD","http://www.riverswavetechnologies.com/"
|
||||
"ROOTLET TECHNOLOGIES PVT LTD","http://www.rootlettechnologies.com/"
|
||||
"RR DONNELLEY INDIA OUTSOURCE (P) LTD","http://www.rrdonnelley.com/"
|
||||
"RSGP CONSULTING (P) LTD","https://rsgpconsult.com/"
|
||||
"SAASVAAP TECHIES (P) LTD","https://www.kerala.gov.in/"
|
||||
"SADA SYSTEMS INDIA PVT LTD","https://sada.com/"
|
||||
"SAHF SOFTWARE TECHNOLOGY (P) LTD","https://sahfsys.net/"
|
||||
"SCALASOFT GLOBAL SOLUTIONS (P) LTD","http://www.scalasoftglobal.com/"
|
||||
"SCALGO TECHNOLOGIES (P) LTD","https://www.scalgo.net/"
|
||||
"SCARBOROUGH IMMIGRATION SERVICES (P) LTD","https://www.scarboroughindia.com/"
|
||||
"SCIENTIFIC VISION (P) LTD","http://www.svsoftware.org/"
|
||||
"SE-MENTOR SOLUTIONS (P) LTD","http://www.se-mentor.com/"
|
||||
"SEAVIEW SUPPORT SYSTEMS (P) LTD","http://www.svw.com/"
|
||||
"SENTORUS BUSINESS SOLUTIONS (P) LTD","https://www.sentorus.io/"
|
||||
"SEPTA MILLES (P) LTD (LIFOLOGY)","http://www.lifology.com/"
|
||||
"SEQATO SOFTWARE SOLUTIONS (P) LTD","http://www.seqato.com/"
|
||||
"SEQUANTIX (P) LTD","https://www.sequantix.com/"
|
||||
"SEQUOIAAT INDIA (P) LTD","http://www.sequoiaat.com/"
|
||||
"SFO TECHNOLOGIES (P) LTD (A NEST GROUP COMPANY)","http://www.sfotechnologies.net/"
|
||||
"SHELLSQUARE SOFTWARES (P) LTD","http://www.shellsquare.com/"
|
||||
"SHERLINS GLOBAL TECHNOLOGIES (P) LTD","https://sherlins.com/"
|
||||
"SIGTECH WIRELESS TECHNOLOGIES (P) LTD","http://www.sigtechwireless.com/"
|
||||
"SIMPLOGICS SOLUTIONS (P) LTD","http://www.simplogics.com/"
|
||||
"SINURA HEALTH INFORMATION PROCESS SOLUIONS (SHIPS) (P) LTD","http://www.sinurasolutions.com/"
|
||||
"SIX DEE TELECOM SOLUTIONS (P) LTD","https://www.6dtechnologies.com/"
|
||||
"SJS BUSINESS SOLUTION (P) LTD","https://www.sodisys.de/"
|
||||
"SKYSMILE TECHNOLOGIES (P) LTD","https://www.skysmiletechnologies.com/"
|
||||
"SKYWALK GLOBAL INDIA (P) LTD","https://www.skywalkglobal.net/"
|
||||
"SMARTHMS & SOLUTIONS (P) LTD","https://shmsolutions.in/"
|
||||
"SOCIUS INNOVATIVE GLOBAL BRAINS (P) LTD","http://www.sociusigb.com/"
|
||||
"SOFTNOTIONS TECHNOLOGIES (P) LTD","https://softnotions.com/"
|
||||
"SOLUTINO TECHNOLOGIES (P) LTD","http://www.solutinotechnologies.com/"
|
||||
"SOMNOWARE HEALTHCARE SYSTEMS (P) LTD","http://www.somnoware.com/"
|
||||
"SPARKLING DESIGN AND INFOTECH (P) LTD","http://www.sparklingapps.com/"
|
||||
"SPERICORN TECHNOLOGY (P) LTD","http://www.spericorn.com/"
|
||||
"SPERIDIAN TECHNOLOGIES (P) LTD","http://www.speridian.com/"
|
||||
"SPERIDIAN TECHNOLOGIES (P) LTD","http://www.speridian.com/"
|
||||
"SREE ANAND TRAVEL & TECHNOLOGIES (P) LTD","http://www.anandtravel.com/"
|
||||
"SRISHTI INNOVATIVE COMPUTER SYSTEMS (P) LTD","http://www.srishtis.com/"
|
||||
"SRISHTI INNOVATIVE EDUCATIONAL SERVICES (P) LTD","https://www.srishtis.com/"
|
||||
"SRS GLOBAL TECHNOLOGIES (P) LTD","https://srsglobaltechnologies.com/"
|
||||
"SRV IT HUB (P) LTD","https://www.kerala.gov.in/"
|
||||
"STABILIX SOLUTIONS (P) LTD","http://www.stabilix.com/"
|
||||
"STACKMOD INNOVATIONS (P) LTD","https://www.stackmod.io/"
|
||||
"STANDOUT IT SOLUTIONS (P) LTD","http://www.sitstech.com/"
|
||||
"STORYGAME (P) LTD","https://storygame.io/"
|
||||
"SUBDINE SOLUTIONS (P) LTD","http://www.subdine.com/"
|
||||
"SUBHOSTING INNOVATIONS (P) LTD","http://www.subhosting.net/"
|
||||
"SUSOPT SOLUTIONS (P) LTD","http://www.susopt.com/"
|
||||
"SWEANS TECHNOLOGIES (P) LTD","http://www.sweans.com/"
|
||||
"SYNCRAYONS TECHNOLOGIES (P) LTD","http://www.syncrayons.com/"
|
||||
"SYNTRIO TECHNOLOGIES (P) LTD","http://www.syntrio.in/"
|
||||
"TANGENTIA TRAVANCORE (P) LTD","https://www.tangentia.com/"
|
||||
"TATA ELXSI","http://www.tataelxsi.com/"
|
||||
"TBF TECHNOLOGY (P) LTD","http://www.tbfventures.com/"
|
||||
"TECHBAND TECHNOLOGIES (P) LTD","http://www.corpozone.com/"
|
||||
"TECHNOCIL (P) LTD","http://www.technocil.com/"
|
||||
"TECHNOGRAPH DIGITAL SOLUTIONS PRIVATE LIMITED","http://www.suwaidillc.com/"
|
||||
"TECHSTAS INFO SOLUTIONS (P) LTD","http://www.techstas.com/"
|
||||
"TECHVANTAGE SYSTEMS (P) LTD","http://www.techvantagesystems.com/"
|
||||
"TECHVERSANT INFOTECH (P) LTD","http://www.techversantinfotech.com/"
|
||||
"TECHZERA INFOLOGICS (P) LTD","https://www.techzera.in/"
|
||||
"TELCOTECH SOLUTIONS HUB (P) LTD","https://www.telcotechsolutionshub.com/"
|
||||
"TELIKOZ INFOTECH (P) LTD","https://telikoz.com/"
|
||||
"TERRIFIC MINDS (P) LTD","http://www.terrificminds.com/"
|
||||
"TERVEYS TECHNOLOGY SOLUTIONS (P) LTD","https://www.terveystech.com/"
|
||||
"TESTHOUSE INDIA (P) LTD","http://www.testhouse.net/"
|
||||
"THINKPALM TECHNOLOGIES (P) LTD","http://www.thinkpalm.com/"
|
||||
"THOUGHT RIPPLES TECHNOLOGIES (P) LTD","http://www.thoughtripples.com/"
|
||||
"THOUGHTLINE TECHNOLOGIES (P) LTD","http://www.thoughtlinetech.com/"
|
||||
"THREE SEAS INFOLOGICS (P) LTD","http://www.threeseasinfologics.com/"
|
||||
"TIGRID TECHNOLOGIES (P) LTD","https://www.tigrid.in/"
|
||||
"TILTLABS CONSULTANCY SERVICES (P) LTD","http://www.tiltlabs.io/"
|
||||
"TIMESWORLD MEDIA AND TECHNOLOGY SOLUTIONS (P) LTD","https://www.timesworld.com/"
|
||||
"TKEY EDUCATION SOLUTIONS (P) LTD [ARBOR]","https://arbor-education.com/"
|
||||
"TOONZ ANIMATION INDIA (P) LTD","http://www.toonz.co/"
|
||||
"TOSIL SYSTEMS (P) LTD","http://www.tosil-systems.com/"
|
||||
"TRAINONEX SOLUTIONS (P) LTD","https://www.kerala.gov.in/"
|
||||
"TRAVANCORE ANALYTICS (P) LTD","http://www.travancoreanalytics.com/"
|
||||
"TRAVANLEO INFO SOLUTIONS INDIA (P) LTD","http://www.travanleo.com/"
|
||||
"TRAVELSHORE TECHNOLOGIES (P) LTD","http://www.travelshore.com/"
|
||||
"TREEZ INDIA (ZEERT POS SOFTWARE DEVELOPMENT (P) LTD)","https://www.treez.io/"
|
||||
"TRENSER TECHNOLOGY SOLUTIONS (P) LTD","http://www.trenser.com/"
|
||||
"TRIASSIC SOLUTIONS (P) LTD","http://www.triassicsolutions.com/"
|
||||
"TRICTA TECHNOLOGIES (P) LTD","https://www.tricta.com/"
|
||||
"TRIVAND TECHNOLOGIES (P) LTD","http://www.trivand.com/"
|
||||
"TRIZENT TECHNOLOGIES (P) LTD","https://www.trizentinc.com/"
|
||||
"TROIS INFOTECH (P) LTD","http://www.trois.in/"
|
||||
"TROODON TECHNOLOGY","http://www.troodontechnology.com/"
|
||||
"TRYZENS INDIA (P) LTD","http://www.tryzens.com/"
|
||||
"TRYZENS INDIA (P) LTD","https://tryzens.com/"
|
||||
"TWINSWAY TECHNOLOGIES (P) LTD","http://www.twinsway.com/"
|
||||
"UPCOMMUNE BUSINESS NETWORK (P) LTD","https://www.upcommune.net/"
|
||||
"UPSKILL INNOVATIVE SOLUTIONS (P) LTD","http://www.olivegroup.io/"
|
||||
"UVJ TECHNOLOGIES (P) LTD","https://www.uvjtech.com/"
|
||||
"VALORIZ DIGITAL (P) LTD","http://www.valoriz.com/"
|
||||
"VANILLA NETWORKS (P) LTD","http://www.vanillanetworks.co.in/"
|
||||
"VELOSIT INFO LAB (P) LTD","https://www.velosit.in/"
|
||||
"VENTURA SYSTEMS PRIVATE LIMITED","http://www.venturasystems.co/"
|
||||
"VERISTICS NETWORKS (P) LTD","http://www.veristics.in/"
|
||||
"VIMEG SQUARE TECHNOLOGIES (P) LTD","http://www.vimegsquare.com/"
|
||||
"VINVISH TECHNOLOGIES (P) LTD","http://www.vinvish.com/"
|
||||
"VIRTUOSOFT TECHNOLOGIES (P) LTD","https://virtuosoftinc.com/"
|
||||
"VIRTUS IT SERVICES (P) LTD","http://www.virtusindia.com/"
|
||||
"VISMAYA INFOTECH SOLUTIONS (P) LTD","https://vismayacorp.com/"
|
||||
"VISTEON TECHNICAL & SERVICE CENTER (P) LTD","https://www.visteon.com/"
|
||||
"VOXFOREM TECHNOLOGIES (P) LTD","http://www.voxforem.org/"
|
||||
"VRITEUP","http://www.vriteup.com/"
|
||||
"VRIZE INDIA (P) LTD","https://www.vrize.com/"
|
||||
"VYUS TECHNOLOGIA LLP","http://www.vyustechnologia.com/"
|
||||
"WAIB3 TECHNOLOGIES (OPC) PVT. LTD","https://www.waib3tech.com/"
|
||||
"WAY DOT COM (P) LTD","http://www.way.com/"
|
||||
"WAYBEO TECHNOLOGY SOLUTIONS (P) LTD","http://www.waybeo.com/"
|
||||
"WEBOFFICE INFOTECH INDIA (P) LTD","http://www.webofficeit.com/"
|
||||
"WEBSORBZ PRIVATE LIMITED","https://websorbz.com/"
|
||||
"WIKTA IT SERVICES (P) LTD","https://wiktait.com/"
|
||||
"WORKPLAZE INNOVATIONS","https://workplaze.com/"
|
||||
"WORKSHAALA SPACES","https://workshaala.com/"
|
||||
"XIGOLOGIX PVT. LTD.","http://www.xigologix.com/"
|
||||
"XILLIGENCE","http://www.xilligence.com/"
|
||||
"XMINDS INFOTECH (P) LTD","http://www.xminds.com/"
|
||||
"XPETIZE TECHNOLOGY SOLUTIONS (P) LTD","http://www.xpetize.com/"
|
||||
"XTREME ONLINE SOLUTIONS (P) LTD","https://www.xtremeonline.in/"
|
||||
"Y2Z LABS (P) LTD","http://www.y2zlabs.com/"
|
||||
"YARAB TECHNOLOGIES (P) LTD","http://www.yrtechnologies.co.in/"
|
||||
"YOURVISION SOFTWARE SOLUTIONS LLP","http://www.yourvision.co.in/"
|
||||
"YSC ENGINEERING SERVICES (P) LTD","http://www.yscindia.com/"
|
||||
"ZAFIN SOFTWARE CENTRE OF EXCELLENCE (P)LTD","http://www.zafin.com/"
|
||||
"ZEBU ANIMATION STUDIOS (P) LTD","http://www.zebuanimation.com/"
|
||||
"ZENTURIOTECH (P) LTD","https://zenturiotech.com/"
|
||||
"ZEROEARTH","https://www.zeroearth.company/"
|
||||
"ZESTYBEANZ TECHNOLOGIES (P) LTD","http://www.zbeanztech.com/"
|
||||
"ZEWIA SOFTWARE SOLUTIONS (P) LTD","http://www.zewiasoft.com/"
|
||||
"ZL SOFTWARE SYSTEMS (P) LTD","https://www.zlinkcorp.com/"
|
||||
"ZONERGIA SERVICES PVT LTD","https://www.zonergia.com/"
|
||||
"ZOONDIA SOFTWARE (P) LTD","http://www.zoondia.com/"
|
||||
"ZYBO TECH LAB (P) LTD","https://zybotechlab.com/"
|
||||
"ZYXWARE TECHNOLOGIES (P) LTD","http://www.zyxware.com/"
|
||||
node_modules/.bin/json2csv (generated, vendored, symbolic link, 1 line)
@@ -0,0 +1 @@
../json2csv/bin/json2csv.js
node_modules/.bin/playwright (generated, vendored, symbolic link, 1 line)
@@ -0,0 +1 @@
../playwright/cli.js
node_modules/.bin/playwright-core (generated, vendored, symbolic link, 1 line)
@@ -0,0 +1 @@
../playwright-core/cli.js
node_modules/.package-lock.json (generated, vendored, normal file, 77 lines)
@@ -0,0 +1,77 @@
{
|
||||
"name": "technopark-scraper",
|
||||
"version": "1.0.0",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"node_modules/@streamparser/json": {
|
||||
"version": "0.0.6",
|
||||
"resolved": "https://registry.npmjs.org/@streamparser/json/-/json-0.0.6.tgz",
|
||||
"integrity": "sha512-vL9EVn/v+OhZ+Wcs6O4iKE9EUpwHUqHmCtNUMWjqp+6dr85+XPOSGTEsqYNq1Vn04uk9SWlOVmx9J48ggJVT2Q=="
|
||||
},
|
||||
"node_modules/commander": {
|
||||
"version": "6.2.1",
|
||||
"resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz",
|
||||
"integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==",
|
||||
"engines": {
|
||||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/csv-writer": {
|
||||
"version": "1.6.0",
|
||||
"resolved": "https://registry.npmjs.org/csv-writer/-/csv-writer-1.6.0.tgz",
|
||||
"integrity": "sha512-NOx7YDFWEsM/fTRAJjRpPp8t+MKRVvniAg9wQlUKx20MFrPs73WLJhFf5iteqrxNYnsy924K3Iroh3yNHeYd2g=="
|
||||
},
|
||||
"node_modules/json2csv": {
|
||||
"version": "6.0.0-alpha.2",
|
||||
"resolved": "https://registry.npmjs.org/json2csv/-/json2csv-6.0.0-alpha.2.tgz",
|
||||
"integrity": "sha512-nJ3oP6QxN8z69IT1HmrJdfVxhU1kLTBVgMfRnNZc37YEY+jZ4nU27rBGxT4vaqM/KUCavLRhntmTuBFqZLBUcA==",
|
||||
"dependencies": {
|
||||
"@streamparser/json": "^0.0.6",
|
||||
"commander": "^6.2.0",
|
||||
"lodash.get": "^4.4.2"
|
||||
},
|
||||
"bin": {
|
||||
"json2csv": "bin/json2csv.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 12",
|
||||
"npm": ">= 6.13.0"
|
||||
}
|
||||
},
|
||||
"node_modules/lodash.get": {
|
||||
"version": "4.4.2",
|
||||
"resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz",
|
||||
"integrity": "sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ==",
|
||||
"deprecated": "This package is deprecated. Use the optional chaining (?.) operator instead."
|
||||
},
|
||||
"node_modules/playwright": {
|
||||
"version": "1.55.0",
|
||||
"resolved": "https://registry.npmjs.org/playwright/-/playwright-1.55.0.tgz",
|
||||
"integrity": "sha512-sdCWStblvV1YU909Xqx0DhOjPZE4/5lJsIS84IfN9dAZfcl/CIZ5O8l3o0j7hPMjDvqoTF8ZUcc+i/GL5erstA==",
|
||||
"dependencies": {
|
||||
"playwright-core": "1.55.0"
|
||||
},
|
||||
"bin": {
|
||||
"playwright": "cli.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"fsevents": "2.3.2"
|
||||
}
|
||||
},
|
||||
"node_modules/playwright-core": {
|
||||
"version": "1.55.0",
|
||||
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.55.0.tgz",
|
||||
"integrity": "sha512-GvZs4vU3U5ro2nZpeiwyb0zuFaqb9sUiAJuyrWpcGouD8y9/HLgGbNRjIph7zU9D3hnPaisMl9zG9CgFi/biIg==",
|
||||
"bin": {
|
||||
"playwright-core": "cli.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
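The lockfile above pins the scraper's runtime dependencies: Playwright 1.55.0 for browser automation, plus csv-writer and json2csv for CSV output. The scraper source itself is not part of this excerpt, so the following is only a hypothetical sketch of how those pieces could produce `companies.csv` (header `Company Name,Company Website`, as in the file above); the script name, the directory URL path, and the CSS selectors are assumptions, not taken from this commit.

```js
// scrape.js (hypothetical name): collect company names and websites from the
// Technopark directory and write them to companies.csv.
const { chromium } = require('playwright');
const { createObjectCsvWriter } = require('csv-writer');

(async () => {
  const browser = await chromium.launch();
  const page = await browser.newPage();
  await page.goto('https://technopark.in/companies'); // assumed listing URL

  // Assumed markup: one card per company with a name element and a website link.
  const companies = await page.$$eval('.company-card', (cards) =>
    cards.map((card) => ({
      name: card.querySelector('.company-name')?.textContent.trim(),
      website: card.querySelector('a[href^="http"]')?.href,
    }))
  );

  const csvWriter = createObjectCsvWriter({
    path: 'companies.csv',
    header: [
      { id: 'name', title: 'Company Name' },
      { id: 'website', title: 'Company Website' },
    ],
  });
  await csvWriter.writeRecords(companies);

  await browser.close();
})();
```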
node_modules/@streamparser/json/.eslintrc.js (generated, vendored, normal file, 16 lines)
@@ -0,0 +1,16 @@
module.exports = {
|
||||
parser: "@typescript-eslint/parser", // Specifies the ESLint parser
|
||||
parserOptions: {
|
||||
ecmaVersion: 2020, // Allows for the parsing of modern ECMAScript features
|
||||
sourceType: "module", // Allows for the use of imports
|
||||
},
|
||||
extends: [
|
||||
"plugin:@typescript-eslint/recommended", // Uses the recommended rules from the @typescript-eslint/eslint-plugin
|
||||
"prettier", // Uses eslint-config-prettier to disable ESLint rules from @typescript-eslint/eslint-plugin that would conflict with prettier
|
||||
"plugin:prettier/recommended" // Enables eslint-plugin-prettier and eslint-config-prettier. This will display prettier errors as ESLint errors. Make sure this is always the last configuration in the extends array.
|
||||
],
|
||||
rules: {
|
||||
// Place to specify ESLint rules. Can be used to overwrite rules specified from the extended configs
|
||||
// e.g. "@typescript-eslint/explicit-function-return-type": "off",
|
||||
},
|
||||
};
|
||||
node_modules/@streamparser/json/.github/workflows/on-push.yaml (generated, vendored, normal file, 33 lines)
@@ -0,0 +1,33 @@
name: Node.js CI
|
||||
|
||||
on: [push, pull_request]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
node-version: [12.x, 14.x, 12.x]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Use Node.js ${{ matrix.node-version }}
|
||||
uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: ${{ matrix.node-version }}
|
||||
- name: Cache Node.js modules
|
||||
uses: actions/cache@v2
|
||||
with:
|
||||
# npm cache files are stored in `~/.npm` on Linux/macOS
|
||||
path: ~/.npm
|
||||
key: ${{ runner.OS }}-node-${{ hashFiles('**/package-lock.json') }}
|
||||
restore-keys: |
|
||||
${{ runner.OS }}-node-
|
||||
${{ runner.OS }}-
|
||||
- run: npm ci
|
||||
- run: npm run lint
|
||||
- run: npm test
|
||||
- run: npm run build
|
||||
env:
|
||||
CI: true
|
||||
node_modules/@streamparser/json/.github/workflows/on-release.yaml (generated, vendored, normal file, 19 lines)
@@ -0,0 +1,19 @@
name: Node.js Publish
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
# Setup .npmrc file to publish to npm
|
||||
- uses: actions/setup-node@v1
|
||||
with:
|
||||
node-version: '16.x'
|
||||
# Needs to be explicitly specified for auth to work
|
||||
registry-url: 'https://registry.npmjs.org'
|
||||
- run: npm ci
|
||||
- run: npm publish --access public
|
||||
env:
|
||||
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
node_modules/@streamparser/json/LICENSE (generated, vendored, normal file, 21 lines)
@@ -0,0 +1,21 @@
MIT License
|
||||
|
||||
Copyright (c) 2020 Juanjo Diaz
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
node_modules/@streamparser/json/README.md (generated, vendored, normal file, 329 lines)
@@ -0,0 +1,329 @@
# @streamparser/json
|
||||
|
||||
Fast dependency-free library to parse a JSON stream using utf-8 encoding in Node.js, Deno or any modern browser. Fully compliant with the JSON spec and `JSON.parse(...)`.
|
||||
|
||||
*tldr;*
|
||||
|
||||
```javascript
|
||||
import { JSONParser } from '@streamparser/json';
|
||||
|
||||
const parser = new JSONParser();
|
||||
parser.onValue = (value) => { /* process data */}
|
||||
|
||||
// Or passing the stream in several chunks
|
||||
try {
|
||||
parser.write('{ "test": ["a"] }');
|
||||
// onValue will be called 3 times:
|
||||
// "a"
|
||||
// ["a"]
|
||||
// { test: ["a"] }
|
||||
} catch (err) {
|
||||
console.log(err); // handle errors
|
||||
}
|
||||
```
|
||||
|
||||
## Dependencies / Polyfilling
|
||||
|
||||
@streamparser/json requires a few ES6 classes:
|
||||
|
||||
* [Uint8Array](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array)
|
||||
* [TextEncoder](https://developer.mozilla.org/en-US/docs/Web/API/TextEncoder)
|
||||
* [TextDecoder](https://developer.mozilla.org/en-US/docs/Web/API/TextDecoder)
|
||||
|
||||
If you are targeting browsers or systems in which these might be missing, you need to polyfill them.
|
||||
|
||||
## Components
|
||||
|
||||
### Tokenizer
|
||||
|
||||
A JSON compliant tokenizer that parses a utf-8 stream into JSON tokens
|
||||
|
||||
```javascript
|
||||
import { Tokenizer } from '@streamparser/json';
|
||||
|
||||
const tokenizer = new Tokenizer(opts);
|
||||
```
|
||||
|
||||
The available options are:
|
||||
|
||||
```javascript
|
||||
{
|
||||
stringBufferSize: <bufferSize>, // set to 0 to disable buffering. Min valid value is 4.
|
||||
numberBufferSize: <bufferSize>, // set to 0 to disable buffering
|
||||
separator: <string>, // separator between objects. For example `\n` for NDJSON.
|
||||
}
|
||||
```
|
||||
|
||||
If buffer sizes are set to anything other than zero, then instead of using a string to append the data as it comes in, the data is buffered using a TypedArray. A reasonable size could be `64 * 1024` (64 KB).
|
||||
|
||||
#### Buffering
|
||||
|
||||
When parsing strings or numbers, the parser needs to gather the data in-memory until the whole value is ready.
|
||||
|
||||
Strings are immutable in JavaScript, so every string operation creates a new string. The V8 engine, behind Node, Deno and most modern browsers, performs many different types of optimization. One of these optimizations is to over-allocate memory when it detects many string concatenations. This increases memory consumption significantly and can easily exhaust your memory when parsing JSON containing very large strings or numbers. For those cases, the parser can buffer the characters using a TypedArray. This requires encoding/decoding from/to the buffer into an actual string once the value is ready, which is done using the `TextEncoder` and `TextDecoder` APIs. Unfortunately, these APIs create significant overhead when the strings are small, so they should be used only when strictly necessary.
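For instance (a minimal illustration based on the options described above, not an example from this package's docs), a parser that buffers strings and numbers in 64 KB TypedArrays could be constructed like this:

```javascript
import { JSONParser } from '@streamparser/json';

// Buffer large string/number values in 64 KB TypedArrays instead of
// concatenating JavaScript strings; worthwhile only for very large values.
const parser = new JSONParser({
  stringBufferSize: 64 * 1024,
  numberBufferSize: 64 * 1024,
});
parser.onValue = (value) => { /* process data */ };
```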
|
||||
|
||||
#### Properties & Methods
|
||||
|
||||
* **write(data: string|typedArray|buffer)** push data into the tokenizer.
|
||||
* **end()** closes the tokenizer so it can not be used anymore. Throws an error if the tokenizer was in the middle of parsing.
|
||||
* **isEnded** readonly boolean property indicating whether the Tokenizer is ended or is still accepting data.
|
||||
* **parseNumber(numberStr)** method used internally to parse numbers. By default, it is equivalent to `Number(numberStr)`, but the user can override it to get some other behaviour.
|
||||
* **onToken(token: TokenType, value: any, offset: number)** no-op method that the user should override to follow the tokenization process.
|
||||
* **onError(err: Error)** no-op method that the user can override to act on errors. If not set, the write method simply throws synchronously.
|
||||
* **onEnd()** no-op method that the user can override to act when the tokenizer is ended.
|
||||
|
||||
```javascript
|
||||
// You can override the overridable methods by creating your own class extending Tokenizer
|
||||
class MyTokenizer extends Tokenizer {
|
||||
parseNumber(numberStr) {
|
||||
const number = super.parseNumber(numberStr);
|
||||
// if number is too large. Just keep the string.
|
||||
return Number.isFinite(number) ? number : numberStr;
|
||||
}
|
||||
onToken(token: TokenType, value: any) {
|
||||
if (token === TokenTypes.NUMBER && typeof value === 'string') {
|
||||
super.onToken(TokenTypes.STRING, value);
|
||||
} else {
|
||||
super.onToken(token, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const myTokenizer = new MyTokenizer();
|
||||
|
||||
// or just overriding it
|
||||
const tokenizer = new Tokenizer();
|
||||
tokenizer.parseNumber = (numberStr) => { ... };
|
||||
tokenizer.onToken = (token, value, offset) => { ... };
|
||||
```
|
||||
|
||||
### TokenParser
|
||||
|
||||
A token parser that processes JSON tokens as emitted by the `Tokenizer` and emits JSON values/objects.
|
||||
|
||||
```javascript
|
||||
import { TokenParser} from '@streamparser/json';
|
||||
|
||||
const tokenParser = new TokenParser(opts);
|
||||
```
|
||||
|
||||
The available options are:
|
||||
|
||||
```javascript
|
||||
{
|
||||
paths: <string[]>,
|
||||
keepStack: <boolean>, // whether to keep all the properties in the stack
|
||||
separator: <string>, // separator between objects. For example `\n` for NDJSON. If left empty or set to undefined, the token parser will end after parsing the first object. To parse multiple objects without any delimiter, just set it to the empty string `''`.
|
||||
}
|
||||
```
|
||||
|
||||
* paths: Array of paths to emit. Defaults to `undefined`, which emits everything. The paths are intended to support JSONPath, although for the time being it only supports the root object selector (`$`) and subproperty selectors including wildcards (`$.a`, `$.*`, `$.a.b`, `$.*.b`, etc.).
|
||||
* keepStack: Whether to keep full objects on the stack even if they won't be emitted. Defaults to `true`. When set to `false`, properties are not preserved in the parent object when some ancestor will be emitted. This means that the parent object passed to the `onValue` function will be empty, which doesn't reflect the actual data, but is more memory-efficient.
|
||||
|
||||
#### Properties & Methods
|
||||
|
||||
* **write(token: TokenType, value: any)** push data into the token parser.
|
||||
* **end()** closes the token parser so it can not be used anymore. Throws an error if the tokenizer was in the middle of parsing.
|
||||
* **isEnded** readonly boolean property indicating whether the token parser is ended or is still accepting data.
|
||||
* **onValue(value: any)** no-op method that the user should override to get the parsed value.
|
||||
* **onError(err: Error)** no-op method that the user should override to act on errors. If not set, the write method simply throws synchronously.
|
||||
* **onEnd()** no-op method that the user should override to act when the token parser is ended.
|
||||
|
||||
```javascript
|
||||
// You can override the overridable methods by creating your own class extending Tokenizer
|
||||
class MyTokenParser extends TokenParser {
|
||||
onValue(value: any) {
|
||||
// ...
|
||||
}
|
||||
}
|
||||
|
||||
const myTokenParser = new MyTokenParser();
|
||||
|
||||
// or just overriding it
|
||||
const tokenParser = new TokenParser();
|
||||
tokenParser.onValue = (value) => { ... };
|
||||
```
|
||||
|
||||
### JSONparser
|
||||
|
||||
A drop-in replacement for `JSONparse` (with a few ~~breaking changes~~ improvements; see below).
|
||||
|
||||
|
||||
```javascript
|
||||
import { JSONParser } from '@streamparser/json';
|
||||
|
||||
const parser = new JSONParser();
|
||||
```
|
||||
|
||||
It takes the same options as the tokenizer.
|
||||
|
||||
This class is just for convenience. In reality, it simply connects the tokenizer and the parser:
|
||||
|
||||
```javascript
|
||||
const tokenizer = new Tokenizer(opts);
|
||||
const tokenParser = new TokenParser();
|
||||
tokenizer.onToken = tokenParser.write.bind(tokenParser);
|
||||
tokenParser.onValue = (value) => { /* Process values */ }
|
||||
```
|
||||
|
||||
#### Properties & Methods
|
||||
|
||||
* **write(data: string|typedArray|buffer)** alias to the Tokenizer write method.
|
||||
* **end()** alias to the Tokenizer end method.
|
||||
* **isEnded** readonly boolean property indicating whether the JSONparser is ended or is still accepting data.
|
||||
* **onToken(token: TokenType, value: any, offset: number)** alias to the Tokenizer onToken method. (write only).
|
||||
* **onValue(value: any)** alias to the Token Parser onValue method (write only).
|
||||
* **onError(err: Error)** alias to the Tokenizer/Token Parser onError method (write only).
|
||||
* **onEnd()** alias to the Tokenizer onEnd method (which will call the Token Parser onEnd methods) (write only).
|
||||
|
||||
```javascript
|
||||
// You can override the overridable methods by creating your own class extending Tokenizer
|
||||
class MyJsonParser extends JSONParser {
|
||||
onToken(value: any) {
|
||||
// ...
|
||||
}
|
||||
onValue(value: any) {
|
||||
// ...
|
||||
}
|
||||
}
|
||||
|
||||
const myJsonParser = new MyJsonParser();
|
||||
|
||||
// or just overriding it
|
||||
const jsonParser = new JSONParser();
|
||||
jsonParser.onToken = (token, value, offset) => { ... };
|
||||
jsonParser.onValue = (value) => { ... };
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
You can use both components independently, as follows:
|
||||
|
||||
```javascript
|
||||
const tokenizer = new Tokenizer(opts);
|
||||
const tokenParser = new TokenParser();
|
||||
tokenizer.onToken = tokenParser.write.bind(tokenParser);
|
||||
```
|
||||
|
||||
You push data using the `write` method which takes a string or an array-like object.
|
||||
|
||||
You can subscribe to the resulting data using the `onValue` callback:
|
||||
|
||||
```javascript
|
||||
import { JSONParser } from '@streamparser/json';
|
||||
|
||||
const parser = new JSONParser({ stringBufferSize: undefined, paths: ['$'] });
|
||||
parser.onValue = console.log;
|
||||
|
||||
parser.write('"Hello world!"'); // logs "Hello world!"
|
||||
|
||||
// Or passing the stream in several chunks
|
||||
parser.write('"');
|
||||
parser.write('Hello');
|
||||
parser.write(' ');
|
||||
parser.write('world!');
|
||||
parser.write('"');// logs "Hello world!"
|
||||
```
|
||||
|
||||
Write is always a synchronous operation so any error during the parsing of the stream will be thrown during the write operation. After an error, the parser can't continue parsing.
|
||||
|
||||
```javascript
|
||||
import { JSONParser } from '@streamparser/json';
|
||||
|
||||
const parser = new JSONParser({ stringBufferSize: undefined });
|
||||
parser.onValue = console.log;
|
||||
|
||||
try {
|
||||
parser.write('"""');
|
||||
} catch (err) {
|
||||
console.log(err); // logs the error
|
||||
}
|
||||
```
|
||||
|
||||
You can also handle errors using callbacks:
|
||||
|
||||
```javascript
|
||||
import { JSONParser } from '@streamparser/json';
|
||||
|
||||
const parser = new JSONParser({ stringBufferSize: undefined });
|
||||
parser.onValue = console.log;
|
||||
parser.onError = console.error;
|
||||
|
||||
parser.write('"""');
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
### Stream-parsing a fetch request returning a JSON stream
|
||||
|
||||
Imagine an endpoint that sends a large number of JSON objects one after the other (`{"id":1}{"id":2}{"id":3}...`).
|
||||
|
||||
```js
|
||||
import { JSONParser} from '@streamparser/json';
|
||||
|
||||
const jsonparser = new JSONParser();
|
||||
jsonparser.onValue = (value, key, parent, stack) => {
|
||||
if (stack.length > 0) return; // ignore inner values
|
||||
// TODO process element
|
||||
}
|
||||
|
||||
const response = await fetch('http://example.com/');
|
||||
const reader = response.body.getReader();
|
||||
while(true) {
|
||||
const { done, value } = await reader.read();
|
||||
if (done) break;
|
||||
jsonparser.write(value);
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
### Stream-parsing a fetch request returning a JSON array
|
||||
|
||||
Imagine an endpoint that sends a large number of JSON objects one after the other (`[{"id":1},{"id":2},{"id":3},...]`).
|
||||
|
||||
```js
|
||||
import { JSONParser } from '@streamparser/json';
|
||||
|
||||
const jsonparser = new JSONParser({ stringBufferSize: undefined, paths: ['$.*'] });
|
||||
jsonparser.onValue = (value, key, parent, stack) => {
|
||||
if (stack.length === 0) /* We are done. Exit. */;
|
||||
// By default, the parser keeps all the child elements in memory until the root parent is emitted.
|
||||
// Let's delete the objects after processing them in order to optimize memory.
|
||||
delete parent[key];
|
||||
// TODO process `value` which will be each of the values in the array.
|
||||
}
|
||||
|
||||
const response = await fetch('http://example.com/');
|
||||
const reader = response.body.getReader();
|
||||
while(true) {
|
||||
const { done, value } = await reader.read();
|
||||
if (done) break;
|
||||
jsonparser.write(value);
|
||||
}
|
||||
```
|
||||
|
||||
## Why build this if we already have JSONparse?
|
||||
|
||||
JSONparse was awesome... in 2011.
|
||||
|
||||
@streamparser/json's strengths include:
|
||||
|
||||
* As performant as the original, and even faster in some cases.
|
||||
* Works on the browser.
|
||||
* Allows selecting what to emit.
|
||||
* Well documented.
|
||||
* Better designed and more pluggable/configurable by clearly separating the tokenizer and token parser processes.
|
||||
* Simpler and cleaner code. Uses ES6 and doesn't rely on deprecated Node.js methods.
|
||||
* 100% unit test coverage.
|
||||
* Fully compliant with the JSON spec. You will always get the same result as using `JSON.parse()`.
|
||||
|
||||
|
||||
### ~~Breaking changes~~ Improvements compared to JSONparse
|
||||
|
||||
* JSONparse keeps big numbers as strings, which is not compliant with the spec. With @streamparser/json you can achieve such behaviour by simply overriding the `parseNumber` method.
|
||||
* JSONparse errors on characters above 244 which is not compliant with the spec. @streamparser/json parsed them correctly.
|
||||
* JSONparse incorrectly allows trailing comas in objects or arrays which is not compliant with the spec. @streamparser/json do not.
|
||||
* JSONparse's uses the `onError` callback to handle errors. Since the `write` method is synchronous, @streamparser/json defaults to throwing on error, so wrapping the write operation in a try-catch block captures all possible errors. If the `onError` callback is set, nothing is thrown.
|
||||
* JSONparse uses buffers to parse strings to avoid memory exhaustion if your JSON include very long strings (due to V8 optimizations). This has a performance impact and it is not necessary for most use cases. @streamparser/json uses a string as internal buffer by default to improve performance and allows the user to get the exact same behaviour as in JSONparse by setting the `stringBufferSize` option to `64 * 1024`.
|
||||
* JSONparse parses all valid JSON objects that come through the stream and doesn't support ending the processing. @streamparser/json ends the processing after a single object unless the user explicitly configure a `separator`. When using a separator, the user can end the processing by calling the `end` method which will end the processing and throw and error if the stream is in the middle of parsing something i.e. the JSON passed so far was incomplete/incorrect. Users can use the `onEnd` callback to act when the processing ends.
|
||||
* JSONparse will fail to emit a number until is followed by a non-numeric character, i.e. it will not parse a single number which is valid JSON. @streamparser/json uses the `end` method to emit any possible number that was being parsed before completely ending the processing.
|
||||
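As a first sketch, here is one way the `parseNumber` override mentioned above could look. It wires a `Tokenizer` subclass to a `TokenParser` the same way `JSONParser` does internally; the `BigNumberTokenizer` name, the precision check, and returning a string instead of a number are illustrative assumptions, not part of the library:

```js
import { Tokenizer, TokenParser } from '@streamparser/json';

// Illustrative subclass: keep the raw digits of numbers that can't be
// represented exactly as a JS number, instead of losing precision.
class BigNumberTokenizer extends Tokenizer {
  parseNumber(numberStr) {
    const asNumber = Number(numberStr);
    // Assumption: the consumer is happy to receive a string for huge integers;
    // the default implementation simply returns Number(numberStr).
    return Number.isSafeInteger(asNumber) || numberStr.includes('.')
      ? asNumber
      : numberStr;
  }
}

const tokenizer = new BigNumberTokenizer();
const tokenParser = new TokenParser();
tokenizer.onToken = tokenParser.write.bind(tokenParser);
tokenParser.onValue = (value, key, parent, stack) => {
  if (stack.length === 0) console.log(value); // { big: '18446744073709551615' }
};

tokenizer.write('{"big": 18446744073709551615}');
```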
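And a second sketch of the `separator` / `end()` / `onEnd` flow described above, using newline-delimited JSON; the payload and the logging are made up for illustration:

```js
import { JSONParser } from '@streamparser/json';

// Parse newline-delimited JSON and stop explicitly when the input is exhausted.
const parser = new JSONParser({ separator: '\n' });
parser.onValue = (value, key, parent, stack) => {
  if (stack.length === 0) console.log('parsed:', value); // each root object
};
parser.onEnd = () => console.log('done');

parser.write('{"id":1}\n{"id":2}\n');
parser.end(); // throws if the stream stopped in the middle of a value
```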
57
node_modules/@streamparser/json/build.deno.js
generated
vendored
Normal file
@ -0,0 +1,57 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
const path = require("path");
|
||||
const {
|
||||
mkdirSync,
|
||||
readdirSync,
|
||||
lstatSync,
|
||||
readFileSync,
|
||||
writeFileSync,
|
||||
} = require("fs");
|
||||
|
||||
function copyReadme(dest) {
|
||||
writeFileSync(
|
||||
path.join(dest, "README.md"),
|
||||
readFileSync("./README.md").toString()
|
||||
.replace(
|
||||
/import \{ JSONparser \} from '@streamparser\/json';/gm,
|
||||
"import JSONparser from 'https://deno.land/x/streamparser_json@v0.0.3/jsonparser.ts';/",
|
||||
)
|
||||
.replace(
|
||||
/import { Tokenizer } from '@streamparser\/json';/gm,
|
||||
"import Tokenizer from 'https://deno.land/x/streamparser_json@v0.0.3/tokenizer.ts';/",
|
||||
)
|
||||
.replace(
|
||||
/import { TokenParser } from '@streamparser\/json';/gm,
|
||||
"import TokenParser from 'https://deno.land/x/streamparser_json@v0.0.3/tokenparser.ts';/",
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
function processDir(src, dest) {
|
||||
mkdirSync(dest, { recursive: true });
|
||||
|
||||
readdirSync(src)
|
||||
.forEach((name) => {
|
||||
const currentPath = path.join(src, name);
|
||||
const destPath = path.join(dest, name);
|
||||
const currentStats = lstatSync(currentPath);
|
||||
if (currentStats.isDirectory()) {
|
||||
processDir(currentPath, destPath);
|
||||
return;
|
||||
}
|
||||
|
||||
writeFileSync(
|
||||
destPath,
|
||||
readFileSync(currentPath).toString().replace(
|
||||
/from "(\.[.\\/-\w]+)"/gm,
|
||||
"from '$1.ts'",
|
||||
),
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
const src = process.argv[2]; // './src'
|
||||
const dest = process.argv[3]; // './dist'
|
||||
processDir(src, dest);
|
||||
copyReadme(dest);
|
||||
35
node_modules/@streamparser/json/build.mjs.js
generated
vendored
Normal file
@ -0,0 +1,35 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
const path = require("path");
|
||||
const {
|
||||
readdirSync,
|
||||
lstatSync,
|
||||
readFileSync,
|
||||
writeFileSync,
|
||||
unlinkSync,
|
||||
} = require("fs");
|
||||
|
||||
function processDir(src) {
|
||||
readdirSync(src)
|
||||
.filter((name) => !/.d.ts$/.test(name))
|
||||
.forEach((name) => {
|
||||
const currentPath = path.join(src, name);
|
||||
const currentStats = lstatSync(currentPath);
|
||||
if (currentStats.isDirectory()) {
|
||||
processDir(currentPath);
|
||||
return;
|
||||
}
|
||||
|
||||
writeFileSync(
|
||||
currentPath.replace(/\.js$/, ".mjs"),
|
||||
readFileSync(currentPath).toString().replace(
|
||||
/from "(\.[.\\/-\w]+)"/gm,
|
||||
"from '$1.mjs'",
|
||||
),
|
||||
);
|
||||
unlinkSync(currentPath);
|
||||
});
|
||||
}
|
||||
|
||||
const src = process.argv[2]; // './dist'
|
||||
processDir(src);
|
||||
5
node_modules/@streamparser/json/dist/cjs/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,5 @@
|
||||
export { default as Tokenizer } from "./tokenizer";
|
||||
export { default as TokenParser } from "./tokenparser";
|
||||
export { default as JSONParser } from "./jsonparser";
|
||||
export * as utf8 from "./utils/utf-8";
|
||||
export { TokenType } from "./utils/constants";
|
||||
12
node_modules/@streamparser/json/dist/cjs/index.js
generated
vendored
Normal file
@ -0,0 +1,12 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.TokenType = exports.utf8 = exports.JSONParser = exports.TokenParser = exports.Tokenizer = void 0;
|
||||
var tokenizer_1 = require("./tokenizer");
|
||||
Object.defineProperty(exports, "Tokenizer", { enumerable: true, get: function () { return tokenizer_1.default; } });
|
||||
var tokenparser_1 = require("./tokenparser");
|
||||
Object.defineProperty(exports, "TokenParser", { enumerable: true, get: function () { return tokenparser_1.default; } });
|
||||
var jsonparser_1 = require("./jsonparser");
|
||||
Object.defineProperty(exports, "JSONParser", { enumerable: true, get: function () { return jsonparser_1.default; } });
|
||||
exports.utf8 = require("./utils/utf-8");
|
||||
var constants_1 = require("./utils/constants");
|
||||
Object.defineProperty(exports, "TokenType", { enumerable: true, get: function () { return constants_1.TokenType; } });
|
||||
18
node_modules/@streamparser/json/dist/cjs/jsonparser.d.ts
generated
vendored
Normal file
@ -0,0 +1,18 @@
|
||||
import { TokenizerOptions } from "./tokenizer";
|
||||
import { StackElement, TokenParserOptions } from "./tokenparser";
|
||||
import { JsonPrimitive, JsonKey, JsonStruct } from "./utils/types";
|
||||
interface JSONParserOpts extends TokenizerOptions, TokenParserOptions {
|
||||
}
|
||||
export default class JSONParser {
|
||||
private tokenizer;
|
||||
private tokenParser;
|
||||
constructor(opts?: JSONParserOpts);
|
||||
get isEnded(): boolean;
|
||||
write(input: Iterable<number> | string): void;
|
||||
end(): void;
|
||||
set onToken(cb: (token: number, value: JsonPrimitive, offset: number) => void);
|
||||
set onValue(cb: (value: JsonPrimitive | JsonStruct, key: JsonKey | undefined, parent: JsonStruct | undefined, stack: StackElement[]) => void);
|
||||
set onError(cb: (err: Error) => void);
|
||||
set onEnd(cb: () => void);
|
||||
}
|
||||
export {};
|
||||
46
node_modules/@streamparser/json/dist/cjs/jsonparser.js
generated
vendored
Normal file
@ -0,0 +1,46 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const tokenizer_1 = require("./tokenizer");
|
||||
const tokenparser_1 = require("./tokenparser");
|
||||
class JSONParser {
|
||||
constructor(opts = {}) {
|
||||
this.tokenizer = new tokenizer_1.default(opts);
|
||||
this.tokenParser = new tokenparser_1.default(opts);
|
||||
this.tokenizer.onToken = this.tokenParser.write.bind(this.tokenParser);
|
||||
this.tokenizer.onEnd = () => {
|
||||
if (!this.tokenParser.isEnded)
|
||||
this.tokenParser.end();
|
||||
};
|
||||
this.tokenParser.onError = this.tokenizer.error.bind(this.tokenizer);
|
||||
this.tokenParser.onEnd = () => {
|
||||
if (!this.tokenizer.isEnded)
|
||||
this.tokenizer.end();
|
||||
};
|
||||
}
|
||||
get isEnded() {
|
||||
return this.tokenizer.isEnded && this.tokenParser.isEnded;
|
||||
}
|
||||
write(input) {
|
||||
this.tokenizer.write(input);
|
||||
}
|
||||
end() {
|
||||
this.tokenizer.end();
|
||||
}
|
||||
set onToken(cb) {
|
||||
this.tokenizer.onToken = cb;
|
||||
}
|
||||
set onValue(cb) {
|
||||
this.tokenParser.onValue = cb;
|
||||
}
|
||||
set onError(cb) {
|
||||
this.tokenizer.onError = cb;
|
||||
}
|
||||
set onEnd(cb) {
|
||||
this.tokenParser.onEnd = () => {
|
||||
if (!this.tokenizer.isEnded)
|
||||
this.tokenizer.end();
|
||||
cb.call(this.tokenParser);
|
||||
};
|
||||
}
|
||||
}
|
||||
exports.default = JSONParser;
|
||||
45
node_modules/@streamparser/json/dist/cjs/tokenizer.d.ts
generated
vendored
Normal file
@ -0,0 +1,45 @@
|
||||
import { TokenType } from "./utils/constants";
|
||||
export interface TokenizerOptions {
|
||||
stringBufferSize?: number;
|
||||
numberBufferSize?: number;
|
||||
separator?: string;
|
||||
}
|
||||
export declare class TokenizerError extends Error {
|
||||
constructor(message: string);
|
||||
}
|
||||
export default class Tokenizer {
|
||||
private state;
|
||||
private separator?;
|
||||
private separatorBytes?;
|
||||
private separatorIndex;
|
||||
private bufferedString;
|
||||
private bufferedNumber;
|
||||
private unicode;
|
||||
private highSurrogate;
|
||||
private bytes_remaining;
|
||||
private bytes_in_sequence;
|
||||
private char_split_buffer;
|
||||
private encoder;
|
||||
private offset;
|
||||
constructor(opts?: TokenizerOptions);
|
||||
get isEnded(): boolean;
|
||||
write(input: Iterable<number> | string): void;
|
||||
private emitNumber;
|
||||
protected parseNumber(numberStr: string): number;
|
||||
error(err: Error): void;
|
||||
end(): void;
|
||||
onToken(token: TokenType.LEFT_BRACE, value: "{", offset: number): void;
|
||||
onToken(token: TokenType.RIGHT_BRACE, value: "}", offset: number): void;
|
||||
onToken(token: TokenType.LEFT_BRACKET, value: "[", offset: number): void;
|
||||
onToken(token: TokenType.RIGHT_BRACKET, value: "]", offset: number): void;
|
||||
onToken(token: TokenType.COLON, value: ":", offset: number): void;
|
||||
onToken(token: TokenType.COMMA, value: ",", offset: number): void;
|
||||
onToken(token: TokenType.TRUE, value: true, offset: number): void;
|
||||
onToken(token: TokenType.FALSE, value: false, offset: number): void;
|
||||
onToken(token: TokenType.NULL, value: null, offset: number): void;
|
||||
onToken(token: TokenType.STRING, value: string, offset: number): void;
|
||||
onToken(token: TokenType.NUMBER, value: number, offset: number): void;
|
||||
onToken(token: TokenType.SEPARATOR, value: string, offset: number): void;
|
||||
onError(err: Error): void;
|
||||
onEnd(): void;
|
||||
}
|
||||
517
node_modules/@streamparser/json/dist/cjs/tokenizer.js
generated
vendored
Normal file
@ -0,0 +1,517 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.TokenizerError = void 0;
|
||||
const utf_8_1 = require("./utils/utf-8");
|
||||
const bufferedString_1 = require("./utils/bufferedString");
|
||||
const constants_1 = require("./utils/constants");
|
||||
const { LEFT_BRACE, RIGHT_BRACE, LEFT_BRACKET, RIGHT_BRACKET, COLON, COMMA, TRUE, FALSE, NULL, STRING, NUMBER, } = constants_1.TokenType;
|
||||
// Tokenizer States
|
||||
var TokenizerStates;
|
||||
(function (TokenizerStates) {
|
||||
TokenizerStates[TokenizerStates["START"] = 0] = "START";
|
||||
TokenizerStates[TokenizerStates["ENDED"] = 1] = "ENDED";
|
||||
TokenizerStates[TokenizerStates["ERROR"] = 2] = "ERROR";
|
||||
TokenizerStates[TokenizerStates["TRUE1"] = 3] = "TRUE1";
|
||||
TokenizerStates[TokenizerStates["TRUE2"] = 4] = "TRUE2";
|
||||
TokenizerStates[TokenizerStates["TRUE3"] = 5] = "TRUE3";
|
||||
TokenizerStates[TokenizerStates["FALSE1"] = 6] = "FALSE1";
|
||||
TokenizerStates[TokenizerStates["FALSE2"] = 7] = "FALSE2";
|
||||
TokenizerStates[TokenizerStates["FALSE3"] = 8] = "FALSE3";
|
||||
TokenizerStates[TokenizerStates["FALSE4"] = 9] = "FALSE4";
|
||||
TokenizerStates[TokenizerStates["NULL1"] = 10] = "NULL1";
|
||||
TokenizerStates[TokenizerStates["NULL2"] = 11] = "NULL2";
|
||||
TokenizerStates[TokenizerStates["NULL3"] = 12] = "NULL3";
|
||||
TokenizerStates[TokenizerStates["STRING_DEFAULT"] = 13] = "STRING_DEFAULT";
|
||||
TokenizerStates[TokenizerStates["STRING_AFTER_BACKSLASH"] = 14] = "STRING_AFTER_BACKSLASH";
|
||||
TokenizerStates[TokenizerStates["STRING_UNICODE_DIGIT_1"] = 15] = "STRING_UNICODE_DIGIT_1";
|
||||
TokenizerStates[TokenizerStates["STRING_UNICODE_DIGIT_2"] = 16] = "STRING_UNICODE_DIGIT_2";
|
||||
TokenizerStates[TokenizerStates["STRING_UNICODE_DIGIT_3"] = 17] = "STRING_UNICODE_DIGIT_3";
|
||||
TokenizerStates[TokenizerStates["STRING_UNICODE_DIGIT_4"] = 18] = "STRING_UNICODE_DIGIT_4";
|
||||
TokenizerStates[TokenizerStates["STRING_INCOMPLETE_CHAR"] = 19] = "STRING_INCOMPLETE_CHAR";
|
||||
TokenizerStates[TokenizerStates["NUMBER_AFTER_INITIAL_MINUS"] = 20] = "NUMBER_AFTER_INITIAL_MINUS";
|
||||
TokenizerStates[TokenizerStates["NUMBER_AFTER_INITIAL_ZERO"] = 21] = "NUMBER_AFTER_INITIAL_ZERO";
|
||||
TokenizerStates[TokenizerStates["NUMBER_AFTER_INITIAL_NON_ZERO"] = 22] = "NUMBER_AFTER_INITIAL_NON_ZERO";
|
||||
TokenizerStates[TokenizerStates["NUMBER_AFTER_FULL_STOP"] = 23] = "NUMBER_AFTER_FULL_STOP";
|
||||
TokenizerStates[TokenizerStates["NUMBER_AFTER_DECIMAL"] = 24] = "NUMBER_AFTER_DECIMAL";
|
||||
TokenizerStates[TokenizerStates["NUMBER_AFTER_E"] = 25] = "NUMBER_AFTER_E";
|
||||
TokenizerStates[TokenizerStates["NUMBER_AFTER_E_AND_SIGN"] = 26] = "NUMBER_AFTER_E_AND_SIGN";
|
||||
TokenizerStates[TokenizerStates["NUMBER_AFTER_E_AND_DIGIT"] = 27] = "NUMBER_AFTER_E_AND_DIGIT";
|
||||
TokenizerStates[TokenizerStates["SEPARATOR"] = 28] = "SEPARATOR";
|
||||
})(TokenizerStates || (TokenizerStates = {}));
|
||||
const defaultOpts = {
|
||||
stringBufferSize: 0,
|
||||
numberBufferSize: 0,
|
||||
separator: undefined,
|
||||
};
|
||||
class TokenizerError extends Error {
|
||||
constructor(message) {
|
||||
super(message);
|
||||
// Typescript is broken. This is a workaround
|
||||
Object.setPrototypeOf(this, TokenizerError.prototype);
|
||||
}
|
||||
}
|
||||
exports.TokenizerError = TokenizerError;
|
||||
class Tokenizer {
|
||||
constructor(opts) {
|
||||
this.state = TokenizerStates.START;
|
||||
this.separatorIndex = 0;
|
||||
this.unicode = undefined; // unicode escapes
|
||||
this.highSurrogate = undefined;
|
||||
this.bytes_remaining = 0; // number of bytes remaining in multi byte utf8 char to read after split boundary
|
||||
this.bytes_in_sequence = 0; // bytes in multi byte utf8 char to read
|
||||
this.char_split_buffer = new Uint8Array(4); // for rebuilding chars split before boundary is reached
|
||||
this.encoder = new TextEncoder();
|
||||
this.offset = -1;
|
||||
opts = Object.assign(Object.assign({}, defaultOpts), opts);
|
||||
this.bufferedString =
|
||||
opts.stringBufferSize && opts.stringBufferSize > 4
|
||||
? new bufferedString_1.BufferedString(opts.stringBufferSize)
|
||||
: new bufferedString_1.NonBufferedString();
|
||||
this.bufferedNumber =
|
||||
opts.numberBufferSize && opts.numberBufferSize > 0
|
||||
? new bufferedString_1.BufferedString(opts.numberBufferSize)
|
||||
: new bufferedString_1.NonBufferedString();
|
||||
this.separator = opts.separator;
|
||||
this.separatorBytes = opts.separator
|
||||
? this.encoder.encode(opts.separator)
|
||||
: undefined;
|
||||
}
|
||||
get isEnded() {
|
||||
return this.state === TokenizerStates.ENDED;
|
||||
}
|
||||
write(input) {
|
||||
let buffer;
|
||||
if (input instanceof Uint8Array) {
|
||||
buffer = input;
|
||||
}
|
||||
else if (typeof input === "string") {
|
||||
buffer = this.encoder.encode(input);
|
||||
}
|
||||
else if ((typeof input === "object" && "buffer" in input) ||
|
||||
Array.isArray(input)) {
|
||||
buffer = Uint8Array.from(input);
|
||||
}
|
||||
else {
|
||||
this.error(new TypeError("Unexpected type. The `write` function only accepts Arrays, TypedArrays and Strings."));
|
||||
return;
|
||||
}
|
||||
for (let i = 0; i < buffer.length; i += 1) {
|
||||
const n = buffer[i]; // get current byte from buffer
|
||||
switch (this.state) {
|
||||
case TokenizerStates.START:
|
||||
this.offset += 1;
|
||||
if (this.separatorBytes && n === this.separatorBytes[0]) {
|
||||
if (this.separatorBytes.length === 1) {
|
||||
this.state = TokenizerStates.START;
|
||||
this.onToken(constants_1.TokenType.SEPARATOR, this.separator, this.offset + this.separatorBytes.length - 1);
|
||||
continue;
|
||||
}
|
||||
this.state = TokenizerStates.SEPARATOR;
|
||||
continue;
|
||||
}
|
||||
if (n === utf_8_1.charset.SPACE ||
|
||||
n === utf_8_1.charset.NEWLINE ||
|
||||
n === utf_8_1.charset.CARRIAGE_RETURN ||
|
||||
n === utf_8_1.charset.TAB) {
|
||||
// whitespace
|
||||
continue;
|
||||
}
|
||||
if (n === utf_8_1.charset.LEFT_CURLY_BRACKET) {
|
||||
this.onToken(LEFT_BRACE, "{", this.offset);
|
||||
continue;
|
||||
}
|
||||
if (n === utf_8_1.charset.RIGHT_CURLY_BRACKET) {
|
||||
this.onToken(RIGHT_BRACE, "}", this.offset);
|
||||
continue;
|
||||
}
|
||||
if (n === utf_8_1.charset.LEFT_SQUARE_BRACKET) {
|
||||
this.onToken(LEFT_BRACKET, "[", this.offset);
|
||||
continue;
|
||||
}
|
||||
if (n === utf_8_1.charset.RIGHT_SQUARE_BRACKET) {
|
||||
this.onToken(RIGHT_BRACKET, "]", this.offset);
|
||||
continue;
|
||||
}
|
||||
if (n === utf_8_1.charset.COLON) {
|
||||
this.onToken(COLON, ":", this.offset);
|
||||
continue;
|
||||
}
|
||||
if (n === utf_8_1.charset.COMMA) {
|
||||
this.onToken(COMMA, ",", this.offset);
|
||||
continue;
|
||||
}
|
||||
if (n === utf_8_1.charset.LATIN_SMALL_LETTER_T) {
|
||||
this.state = TokenizerStates.TRUE1;
|
||||
continue;
|
||||
}
|
||||
if (n === utf_8_1.charset.LATIN_SMALL_LETTER_F) {
|
||||
this.state = TokenizerStates.FALSE1;
|
||||
continue;
|
||||
}
|
||||
if (n === utf_8_1.charset.LATIN_SMALL_LETTER_N) {
|
||||
this.state = TokenizerStates.NULL1;
|
||||
continue;
|
||||
}
|
||||
if (n === utf_8_1.charset.QUOTATION_MARK) {
|
||||
this.bufferedString.reset();
|
||||
this.state = TokenizerStates.STRING_DEFAULT;
|
||||
continue;
|
||||
}
|
||||
if (n >= utf_8_1.charset.DIGIT_ONE && n <= utf_8_1.charset.DIGIT_NINE) {
|
||||
this.bufferedNumber.reset();
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_INITIAL_NON_ZERO;
|
||||
continue;
|
||||
}
|
||||
if (n === utf_8_1.charset.DIGIT_ZERO) {
|
||||
this.bufferedNumber.reset();
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_INITIAL_ZERO;
|
||||
continue;
|
||||
}
|
||||
if (n === utf_8_1.charset.HYPHEN_MINUS) {
|
||||
this.bufferedNumber.reset();
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_INITIAL_MINUS;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
// STRING
|
||||
case TokenizerStates.STRING_DEFAULT:
|
||||
if (n === utf_8_1.charset.QUOTATION_MARK) {
|
||||
const string = this.bufferedString.toString();
|
||||
this.state = TokenizerStates.START;
|
||||
this.onToken(STRING, string, this.offset);
|
||||
this.offset += this.bufferedString.byteLength + 1;
|
||||
continue;
|
||||
}
|
||||
if (n === utf_8_1.charset.REVERSE_SOLIDUS) {
|
||||
this.state = TokenizerStates.STRING_AFTER_BACKSLASH;
|
||||
continue;
|
||||
}
|
||||
if (n >= 128) {
|
||||
// Parse multi byte (>=128) chars one at a time
|
||||
if (n >= 194 && n <= 223) {
|
||||
this.bytes_in_sequence = 2;
|
||||
}
|
||||
else if (n <= 239) {
|
||||
this.bytes_in_sequence = 3;
|
||||
}
|
||||
else {
|
||||
this.bytes_in_sequence = 4;
|
||||
}
|
||||
if (this.bytes_in_sequence <= buffer.length - i) {
|
||||
// if bytes needed to complete char fall outside buffer length, we have a boundary split
|
||||
this.bufferedString.appendBuf(buffer, i, i + this.bytes_in_sequence);
|
||||
i += this.bytes_in_sequence - 1;
|
||||
continue;
|
||||
}
|
||||
this.bytes_remaining = i + this.bytes_in_sequence - buffer.length;
|
||||
this.char_split_buffer.set(buffer.subarray(i));
|
||||
i = buffer.length - 1;
|
||||
this.state = TokenizerStates.STRING_INCOMPLETE_CHAR;
|
||||
continue;
|
||||
}
|
||||
if (n >= utf_8_1.charset.SPACE) {
|
||||
this.bufferedString.appendChar(n);
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.STRING_INCOMPLETE_CHAR:
|
||||
// check for carry over of a multi byte char split between data chunks
|
||||
// & fill temp buffer it with start of this data chunk up to the boundary limit set in the last iteration
|
||||
this.char_split_buffer.set(buffer.subarray(i, i + this.bytes_remaining), this.bytes_in_sequence - this.bytes_remaining);
|
||||
this.bufferedString.appendBuf(this.char_split_buffer, 0, this.bytes_in_sequence);
|
||||
i = this.bytes_remaining - 1;
|
||||
this.state = TokenizerStates.STRING_DEFAULT;
|
||||
continue;
|
||||
case TokenizerStates.STRING_AFTER_BACKSLASH:
|
||||
const controlChar = utf_8_1.escapedSequences[n];
|
||||
if (controlChar) {
|
||||
this.bufferedString.appendChar(controlChar);
|
||||
this.state = TokenizerStates.STRING_DEFAULT;
|
||||
continue;
|
||||
}
|
||||
if (n === utf_8_1.charset.LATIN_SMALL_LETTER_U) {
|
||||
this.unicode = "";
|
||||
this.state = TokenizerStates.STRING_UNICODE_DIGIT_1;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.STRING_UNICODE_DIGIT_1:
|
||||
case TokenizerStates.STRING_UNICODE_DIGIT_2:
|
||||
case TokenizerStates.STRING_UNICODE_DIGIT_3:
|
||||
if ((n >= utf_8_1.charset.DIGIT_ZERO && n <= utf_8_1.charset.DIGIT_NINE) ||
|
||||
(n >= utf_8_1.charset.LATIN_CAPITAL_LETTER_A &&
|
||||
n <= utf_8_1.charset.LATIN_CAPITAL_LETTER_F) ||
|
||||
(n >= utf_8_1.charset.LATIN_SMALL_LETTER_A &&
|
||||
n <= utf_8_1.charset.LATIN_SMALL_LETTER_F)) {
|
||||
this.unicode += String.fromCharCode(n);
|
||||
this.state += 1;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.STRING_UNICODE_DIGIT_4:
|
||||
if ((n >= utf_8_1.charset.DIGIT_ZERO && n <= utf_8_1.charset.DIGIT_NINE) ||
|
||||
(n >= utf_8_1.charset.LATIN_CAPITAL_LETTER_A &&
|
||||
n <= utf_8_1.charset.LATIN_CAPITAL_LETTER_F) ||
|
||||
(n >= utf_8_1.charset.LATIN_SMALL_LETTER_A &&
|
||||
n <= utf_8_1.charset.LATIN_SMALL_LETTER_F)) {
|
||||
const intVal = parseInt(this.unicode + String.fromCharCode(n), 16);
|
||||
if (this.highSurrogate === undefined) {
|
||||
if (intVal >= 0xd800 && intVal <= 0xdbff) {
|
||||
//<55296,56319> - highSurrogate
|
||||
this.highSurrogate = intVal;
|
||||
}
|
||||
else {
|
||||
this.bufferedString.appendBuf(this.encoder.encode(String.fromCharCode(intVal)));
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (intVal >= 0xdc00 && intVal <= 0xdfff) {
|
||||
//<56320,57343> - lowSurrogate
|
||||
this.bufferedString.appendBuf(this.encoder.encode(String.fromCharCode(this.highSurrogate, intVal)));
|
||||
}
|
||||
else {
|
||||
this.bufferedString.appendBuf(this.encoder.encode(String.fromCharCode(this.highSurrogate)));
|
||||
}
|
||||
this.highSurrogate = undefined;
|
||||
}
|
||||
this.state = TokenizerStates.STRING_DEFAULT;
|
||||
continue;
|
||||
}
|
||||
// Number
|
||||
case TokenizerStates.NUMBER_AFTER_INITIAL_MINUS:
|
||||
if (n === utf_8_1.charset.DIGIT_ZERO) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_INITIAL_ZERO;
|
||||
continue;
|
||||
}
|
||||
if (n >= utf_8_1.charset.DIGIT_ONE && n <= utf_8_1.charset.DIGIT_NINE) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_INITIAL_NON_ZERO;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.NUMBER_AFTER_INITIAL_ZERO:
|
||||
if (n === utf_8_1.charset.FULL_STOP) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_FULL_STOP;
|
||||
continue;
|
||||
}
|
||||
if (n === utf_8_1.charset.LATIN_SMALL_LETTER_E ||
|
||||
n === utf_8_1.charset.LATIN_CAPITAL_LETTER_E) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_E;
|
||||
continue;
|
||||
}
|
||||
i -= 1;
|
||||
this.state = TokenizerStates.START;
|
||||
this.emitNumber();
|
||||
continue;
|
||||
case TokenizerStates.NUMBER_AFTER_INITIAL_NON_ZERO:
|
||||
if (n >= utf_8_1.charset.DIGIT_ZERO && n <= utf_8_1.charset.DIGIT_NINE) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
continue;
|
||||
}
|
||||
if (n === utf_8_1.charset.FULL_STOP) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_FULL_STOP;
|
||||
continue;
|
||||
}
|
||||
if (n === utf_8_1.charset.LATIN_SMALL_LETTER_E ||
|
||||
n === utf_8_1.charset.LATIN_CAPITAL_LETTER_E) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_E;
|
||||
continue;
|
||||
}
|
||||
i -= 1;
|
||||
this.state = TokenizerStates.START;
|
||||
this.emitNumber();
|
||||
continue;
|
||||
case TokenizerStates.NUMBER_AFTER_FULL_STOP:
|
||||
if (n >= utf_8_1.charset.DIGIT_ZERO && n <= utf_8_1.charset.DIGIT_NINE) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_DECIMAL;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.NUMBER_AFTER_DECIMAL:
|
||||
if (n >= utf_8_1.charset.DIGIT_ZERO && n <= utf_8_1.charset.DIGIT_NINE) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
continue;
|
||||
}
|
||||
if (n === utf_8_1.charset.LATIN_SMALL_LETTER_E ||
|
||||
n === utf_8_1.charset.LATIN_CAPITAL_LETTER_E) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_E;
|
||||
continue;
|
||||
}
|
||||
i -= 1;
|
||||
this.state = TokenizerStates.START;
|
||||
this.emitNumber();
|
||||
continue;
|
||||
case TokenizerStates.NUMBER_AFTER_E:
|
||||
if (n === utf_8_1.charset.PLUS_SIGN || n === utf_8_1.charset.HYPHEN_MINUS) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_E_AND_SIGN;
|
||||
continue;
|
||||
}
|
||||
// Allow cascading
|
||||
case TokenizerStates.NUMBER_AFTER_E_AND_SIGN:
|
||||
if (n >= utf_8_1.charset.DIGIT_ZERO && n <= utf_8_1.charset.DIGIT_NINE) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_E_AND_DIGIT;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.NUMBER_AFTER_E_AND_DIGIT:
|
||||
if (n >= utf_8_1.charset.DIGIT_ZERO && n <= utf_8_1.charset.DIGIT_NINE) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
continue;
|
||||
}
|
||||
i -= 1;
|
||||
this.state = TokenizerStates.START;
|
||||
this.emitNumber();
|
||||
continue;
|
||||
// TRUE
|
||||
case TokenizerStates.TRUE1:
|
||||
if (n === utf_8_1.charset.LATIN_SMALL_LETTER_R) {
|
||||
this.state = TokenizerStates.TRUE2;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.TRUE2:
|
||||
if (n === utf_8_1.charset.LATIN_SMALL_LETTER_U) {
|
||||
this.state = TokenizerStates.TRUE3;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.TRUE3:
|
||||
if (n === utf_8_1.charset.LATIN_SMALL_LETTER_E) {
|
||||
this.state = TokenizerStates.START;
|
||||
this.onToken(TRUE, true, this.offset);
|
||||
this.offset += 3;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
// FALSE
|
||||
case TokenizerStates.FALSE1:
|
||||
if (n === utf_8_1.charset.LATIN_SMALL_LETTER_A) {
|
||||
this.state = TokenizerStates.FALSE2;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.FALSE2:
|
||||
if (n === utf_8_1.charset.LATIN_SMALL_LETTER_L) {
|
||||
this.state = TokenizerStates.FALSE3;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.FALSE3:
|
||||
if (n === utf_8_1.charset.LATIN_SMALL_LETTER_S) {
|
||||
this.state = TokenizerStates.FALSE4;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.FALSE4:
|
||||
if (n === utf_8_1.charset.LATIN_SMALL_LETTER_E) {
|
||||
this.state = TokenizerStates.START;
|
||||
this.onToken(FALSE, false, this.offset);
|
||||
this.offset += 4;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
// NULL
|
||||
case TokenizerStates.NULL1:
|
||||
if (n === utf_8_1.charset.LATIN_SMALL_LETTER_U) {
|
||||
this.state = TokenizerStates.NULL2;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.NULL2:
|
||||
if (n === utf_8_1.charset.LATIN_SMALL_LETTER_L) {
|
||||
this.state = TokenizerStates.NULL3;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.NULL3:
|
||||
if (n === utf_8_1.charset.LATIN_SMALL_LETTER_L) {
|
||||
this.state = TokenizerStates.START;
|
||||
this.onToken(NULL, null, this.offset);
|
||||
this.offset += 3;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.SEPARATOR:
|
||||
this.separatorIndex += 1;
|
||||
if (!this.separatorBytes ||
|
||||
n !== this.separatorBytes[this.separatorIndex]) {
|
||||
break;
|
||||
}
|
||||
if (this.separatorIndex === this.separatorBytes.length - 1) {
|
||||
this.state = TokenizerStates.START;
|
||||
this.onToken(constants_1.TokenType.SEPARATOR, this.separator, this.offset + this.separatorIndex);
|
||||
this.separatorIndex = 0;
|
||||
}
|
||||
continue;
|
||||
case TokenizerStates.ENDED:
|
||||
if (n === utf_8_1.charset.SPACE ||
|
||||
n === utf_8_1.charset.NEWLINE ||
|
||||
n === utf_8_1.charset.CARRIAGE_RETURN ||
|
||||
n === utf_8_1.charset.TAB) {
|
||||
// whitespace
|
||||
continue;
|
||||
}
|
||||
}
|
||||
this.error(new TokenizerError(`Unexpected "${String.fromCharCode(n)}" at position "${i}" in state ${TokenizerStates[this.state]}`));
|
||||
return;
|
||||
}
|
||||
}
|
||||
emitNumber() {
|
||||
this.onToken(NUMBER, this.parseNumber(this.bufferedNumber.toString()), this.offset);
|
||||
this.offset += this.bufferedNumber.byteLength - 1;
|
||||
}
|
||||
parseNumber(numberStr) {
|
||||
return Number(numberStr);
|
||||
}
|
||||
error(err) {
|
||||
if (this.state !== TokenizerStates.ENDED) {
|
||||
this.state = TokenizerStates.ERROR;
|
||||
}
|
||||
this.onError(err);
|
||||
}
|
||||
end() {
|
||||
switch (this.state) {
|
||||
case TokenizerStates.NUMBER_AFTER_INITIAL_ZERO:
|
||||
case TokenizerStates.NUMBER_AFTER_INITIAL_NON_ZERO:
|
||||
case TokenizerStates.NUMBER_AFTER_DECIMAL:
|
||||
case TokenizerStates.NUMBER_AFTER_E_AND_DIGIT:
|
||||
this.state = TokenizerStates.ENDED;
|
||||
this.emitNumber();
|
||||
this.onEnd();
|
||||
break;
|
||||
case TokenizerStates.START:
|
||||
case TokenizerStates.ERROR:
|
||||
case TokenizerStates.SEPARATOR:
|
||||
this.state = TokenizerStates.ENDED;
|
||||
this.onEnd();
|
||||
break;
|
||||
default:
|
||||
this.error(new TokenizerError(`Tokenizer ended in the middle of a token (state: ${TokenizerStates[this.state]}). Either not all the data was received or the data was invalid.`));
|
||||
}
|
||||
}
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
onToken(token, value, offset) {
|
||||
// Override me
|
||||
throw new TokenizerError('Can\'t emit tokens before the "onToken" callback has been set up.');
|
||||
}
|
||||
onError(err) {
|
||||
// Override me
|
||||
throw err;
|
||||
}
|
||||
onEnd() {
|
||||
// Override me
|
||||
}
|
||||
}
|
||||
exports.default = Tokenizer;
|
||||
55
node_modules/@streamparser/json/dist/cjs/tokenparser.d.ts
generated
vendored
Normal file
@ -0,0 +1,55 @@
|
||||
import { TokenType } from "./utils/constants";
|
||||
import { JsonPrimitive, JsonKey, JsonObject, JsonArray, JsonStruct } from "./utils/types";
|
||||
export declare enum TokenParserMode {
|
||||
OBJECT = 0,
|
||||
ARRAY = 1
|
||||
}
|
||||
export interface StackElement {
|
||||
key: JsonKey;
|
||||
value: JsonStruct;
|
||||
mode: TokenParserMode | undefined;
|
||||
emit: boolean;
|
||||
}
|
||||
export interface TokenParserOptions {
|
||||
paths?: string[];
|
||||
keepStack?: boolean;
|
||||
separator?: string;
|
||||
}
|
||||
export declare class TokenParserError extends Error {
|
||||
constructor(message: string);
|
||||
}
|
||||
export default class TokenParser {
|
||||
private readonly paths?;
|
||||
private readonly keepStack;
|
||||
private readonly separator?;
|
||||
private state;
|
||||
private mode;
|
||||
private key;
|
||||
private value;
|
||||
private stack;
|
||||
constructor(opts?: TokenParserOptions);
|
||||
private shouldEmit;
|
||||
private push;
|
||||
private pop;
|
||||
private emit;
|
||||
get isEnded(): boolean;
|
||||
write(token: TokenType.LEFT_BRACE, value: "{"): void;
|
||||
write(token: TokenType.RIGHT_BRACE, value: "}"): void;
|
||||
write(token: TokenType.LEFT_BRACKET, value: "["): void;
|
||||
write(token: TokenType.RIGHT_BRACKET, value: "]"): void;
|
||||
write(token: TokenType.COLON, value: ":"): void;
|
||||
write(token: TokenType.COMMA, value: ","): void;
|
||||
write(token: TokenType.TRUE, value: true): void;
|
||||
write(token: TokenType.FALSE, value: false): void;
|
||||
write(token: TokenType.NULL, value: null): void;
|
||||
write(token: TokenType.STRING, value: string): void;
|
||||
write(token: TokenType.NUMBER, value: number): void;
|
||||
write(token: TokenType.SEPARATOR, value: string): void;
|
||||
error(err: Error): void;
|
||||
end(): void;
|
||||
onValue(value: JsonPrimitive | JsonStruct, key: number, parent: JsonArray, stack: StackElement[]): void;
|
||||
onValue(value: JsonPrimitive | JsonStruct, key: string, parent: JsonObject, stack: StackElement[]): void;
|
||||
onValue(value: JsonPrimitive | JsonStruct, key: undefined, parent: undefined, stack: []): void;
|
||||
onError(err: Error): void;
|
||||
onEnd(): void;
|
||||
}
|
||||
269
node_modules/@streamparser/json/dist/cjs/tokenparser.js
generated
vendored
Normal file
@ -0,0 +1,269 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.TokenParserError = exports.TokenParserMode = void 0;
|
||||
const constants_1 = require("./utils/constants");
|
||||
const { LEFT_BRACE, RIGHT_BRACE, LEFT_BRACKET, RIGHT_BRACKET, COLON, COMMA, TRUE, FALSE, NULL, STRING, NUMBER, SEPARATOR, } = constants_1.TokenType;
|
||||
// Parser States
|
||||
var TokenParserState;
|
||||
(function (TokenParserState) {
|
||||
TokenParserState[TokenParserState["VALUE"] = 0] = "VALUE";
|
||||
TokenParserState[TokenParserState["KEY"] = 1] = "KEY";
|
||||
TokenParserState[TokenParserState["COLON"] = 2] = "COLON";
|
||||
TokenParserState[TokenParserState["COMMA"] = 3] = "COMMA";
|
||||
TokenParserState[TokenParserState["ENDED"] = 4] = "ENDED";
|
||||
TokenParserState[TokenParserState["ERROR"] = 5] = "ERROR";
|
||||
TokenParserState[TokenParserState["SEPARATOR"] = 6] = "SEPARATOR";
|
||||
})(TokenParserState || (TokenParserState = {}));
|
||||
// Parser Modes
|
||||
var TokenParserMode;
|
||||
(function (TokenParserMode) {
|
||||
TokenParserMode[TokenParserMode["OBJECT"] = 0] = "OBJECT";
|
||||
TokenParserMode[TokenParserMode["ARRAY"] = 1] = "ARRAY";
|
||||
})(TokenParserMode = exports.TokenParserMode || (exports.TokenParserMode = {}));
|
||||
const defaultOpts = {
|
||||
paths: undefined,
|
||||
keepStack: true,
|
||||
separator: undefined,
|
||||
};
|
||||
class TokenParserError extends Error {
|
||||
constructor(message) {
|
||||
super(message);
|
||||
// Typescript is broken. This is a workaround
|
||||
Object.setPrototypeOf(this, TokenParserError.prototype);
|
||||
}
|
||||
}
|
||||
exports.TokenParserError = TokenParserError;
|
||||
class TokenParser {
|
||||
constructor(opts) {
|
||||
this.state = TokenParserState.VALUE;
|
||||
this.mode = undefined;
|
||||
this.key = undefined;
|
||||
this.value = undefined;
|
||||
this.stack = [];
|
||||
opts = Object.assign(Object.assign({}, defaultOpts), opts);
|
||||
if (opts.paths) {
|
||||
this.paths = opts.paths.map((path) => {
|
||||
if (path === undefined || path === "$*")
|
||||
return undefined;
|
||||
if (!path.startsWith("$"))
|
||||
throw new TokenParserError(`Invalid selector "${path}". Should start with "$".`);
|
||||
const pathParts = path.split(".").slice(1);
|
||||
if (pathParts.includes(""))
|
||||
throw new TokenParserError(`Invalid selector "${path}". ".." syntax not supported.`);
|
||||
return pathParts;
|
||||
});
|
||||
}
|
||||
this.keepStack = opts.keepStack;
|
||||
this.separator = opts.separator;
|
||||
}
|
||||
shouldEmit() {
|
||||
if (!this.paths)
|
||||
return true;
|
||||
return this.paths.some((path) => {
|
||||
var _a;
|
||||
if (path === undefined)
|
||||
return true;
|
||||
if (path.length !== this.stack.length)
|
||||
return false;
|
||||
for (let i = 0; i < path.length - 1; i++) {
|
||||
const selector = path[i];
|
||||
const key = this.stack[i + 1].key;
|
||||
if (selector === "*")
|
||||
continue;
|
||||
if (selector !== key)
|
||||
return false;
|
||||
}
|
||||
const selector = path[path.length - 1];
|
||||
if (selector === "*")
|
||||
return true;
|
||||
return selector === ((_a = this.key) === null || _a === void 0 ? void 0 : _a.toString());
|
||||
});
|
||||
}
|
||||
push() {
|
||||
this.stack.push({
|
||||
key: this.key,
|
||||
value: this.value,
|
||||
mode: this.mode,
|
||||
emit: this.shouldEmit(),
|
||||
});
|
||||
}
|
||||
pop() {
|
||||
const value = this.value;
|
||||
let emit;
|
||||
({
|
||||
key: this.key,
|
||||
value: this.value,
|
||||
mode: this.mode,
|
||||
emit,
|
||||
} = this.stack.pop());
|
||||
this.state =
|
||||
this.mode !== undefined ? TokenParserState.COMMA : TokenParserState.VALUE;
|
||||
this.emit(value, emit);
|
||||
}
|
||||
emit(value, emit) {
|
||||
if (!this.keepStack &&
|
||||
this.value &&
|
||||
this.stack.every((item) => !item.emit)) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
delete this.value[this.key];
|
||||
}
|
||||
if (emit) {
|
||||
this.onValue(value,
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
this.key,
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
this.value, this.stack);
|
||||
}
|
||||
if (this.stack.length === 0) {
|
||||
if (this.separator) {
|
||||
this.state = TokenParserState.SEPARATOR;
|
||||
}
|
||||
else if (this.separator === undefined) {
|
||||
this.end();
|
||||
}
|
||||
// else if separator === '', expect next JSON object.
|
||||
}
|
||||
}
|
||||
get isEnded() {
|
||||
return this.state === TokenParserState.ENDED;
|
||||
}
|
||||
write(token, value) {
|
||||
if (this.state === TokenParserState.VALUE) {
|
||||
if (token === STRING ||
|
||||
token === NUMBER ||
|
||||
token === TRUE ||
|
||||
token === FALSE ||
|
||||
token === NULL) {
|
||||
if (this.mode === TokenParserMode.OBJECT) {
|
||||
this.value[this.key] = value;
|
||||
this.state = TokenParserState.COMMA;
|
||||
}
|
||||
else if (this.mode === TokenParserMode.ARRAY) {
|
||||
this.value.push(value);
|
||||
this.state = TokenParserState.COMMA;
|
||||
}
|
||||
this.emit(value, this.shouldEmit());
|
||||
return;
|
||||
}
|
||||
if (token === LEFT_BRACE) {
|
||||
this.push();
|
||||
if (this.mode === TokenParserMode.OBJECT) {
|
||||
this.value = this.value[this.key] = {};
|
||||
}
|
||||
else if (this.mode === TokenParserMode.ARRAY) {
|
||||
const val = {};
|
||||
this.value.push(val);
|
||||
this.value = val;
|
||||
}
|
||||
else {
|
||||
this.value = {};
|
||||
}
|
||||
this.mode = TokenParserMode.OBJECT;
|
||||
this.state = TokenParserState.KEY;
|
||||
this.key = undefined;
|
||||
return;
|
||||
}
|
||||
if (token === LEFT_BRACKET) {
|
||||
this.push();
|
||||
if (this.mode === TokenParserMode.OBJECT) {
|
||||
this.value = this.value[this.key] = [];
|
||||
}
|
||||
else if (this.mode === TokenParserMode.ARRAY) {
|
||||
const val = [];
|
||||
this.value.push(val);
|
||||
this.value = val;
|
||||
}
|
||||
else {
|
||||
this.value = [];
|
||||
}
|
||||
this.mode = TokenParserMode.ARRAY;
|
||||
this.state = TokenParserState.VALUE;
|
||||
this.key = 0;
|
||||
return;
|
||||
}
|
||||
if (this.mode === TokenParserMode.ARRAY &&
|
||||
token === RIGHT_BRACKET &&
|
||||
this.value.length === 0) {
|
||||
this.pop();
|
||||
return;
|
||||
}
|
||||
}
|
||||
if (this.state === TokenParserState.KEY) {
|
||||
if (token === STRING) {
|
||||
this.key = value;
|
||||
this.state = TokenParserState.COLON;
|
||||
return;
|
||||
}
|
||||
if (token === RIGHT_BRACE &&
|
||||
Object.keys(this.value).length === 0) {
|
||||
this.pop();
|
||||
return;
|
||||
}
|
||||
}
|
||||
if (this.state === TokenParserState.COLON) {
|
||||
if (token === COLON) {
|
||||
this.state = TokenParserState.VALUE;
|
||||
return;
|
||||
}
|
||||
}
|
||||
if (this.state === TokenParserState.COMMA) {
|
||||
if (token === COMMA) {
|
||||
if (this.mode === TokenParserMode.ARRAY) {
|
||||
this.state = TokenParserState.VALUE;
|
||||
this.key += 1;
|
||||
return;
|
||||
}
|
||||
/* istanbul ignore else */
|
||||
if (this.mode === TokenParserMode.OBJECT) {
|
||||
this.state = TokenParserState.KEY;
|
||||
return;
|
||||
}
|
||||
}
|
||||
if ((token === RIGHT_BRACE && this.mode === TokenParserMode.OBJECT) ||
|
||||
(token === RIGHT_BRACKET && this.mode === TokenParserMode.ARRAY)) {
|
||||
this.pop();
|
||||
return;
|
||||
}
|
||||
}
|
||||
if (this.state === TokenParserState.SEPARATOR) {
|
||||
if (token === SEPARATOR && value === this.separator) {
|
||||
this.state = TokenParserState.VALUE;
|
||||
return;
|
||||
}
|
||||
}
|
||||
this.error(new TokenParserError(`Unexpected ${constants_1.TokenType[token]} (${JSON.stringify(value)}) in state ${TokenParserState[this.state]}`));
|
||||
}
|
||||
error(err) {
|
||||
if (this.state !== TokenParserState.ENDED) {
|
||||
this.state = TokenParserState.ERROR;
|
||||
}
|
||||
this.onError(err);
|
||||
}
|
||||
end() {
|
||||
if ((this.state !== TokenParserState.VALUE &&
|
||||
this.state !== TokenParserState.SEPARATOR) ||
|
||||
this.stack.length > 0) {
|
||||
this.error(new Error(`Parser ended in mid-parsing (state: ${TokenParserState[this.state]}). Either not all the data was received or the data was invalid.`));
|
||||
}
|
||||
else {
|
||||
this.state = TokenParserState.ENDED;
|
||||
this.onEnd();
|
||||
}
|
||||
}
|
||||
onValue(
|
||||
/* eslint-disable @typescript-eslint/no-unused-vars */
|
||||
value, key, parent, stack
|
||||
/* eslint-enable @typescript-eslint/no-unused-vars */
|
||||
) {
|
||||
// Override me
|
||||
throw new TokenParserError('Can\'t emit data before the "onValue" callback has been set up.');
|
||||
}
|
||||
onError(err) {
|
||||
// Override me
|
||||
throw err;
|
||||
}
|
||||
onEnd() {
|
||||
// Override me
|
||||
}
|
||||
}
|
||||
exports.default = TokenParser;
|
||||
29
node_modules/@streamparser/json/dist/cjs/utils/bufferedString.d.ts
generated
vendored
Normal file
@ -0,0 +1,29 @@
|
||||
export interface StringBuilder {
|
||||
byteLength: number;
|
||||
appendChar: (char: number) => void;
|
||||
appendBuf: (buf: Uint8Array, start?: number, end?: number) => void;
|
||||
reset: () => void;
|
||||
toString: () => string;
|
||||
}
|
||||
export declare class NonBufferedString implements StringBuilder {
|
||||
private decoder;
|
||||
private string;
|
||||
byteLength: number;
|
||||
appendChar(char: number): void;
|
||||
appendBuf(buf: Uint8Array, start?: number, end?: number): void;
|
||||
reset(): void;
|
||||
toString(): string;
|
||||
}
|
||||
export declare class BufferedString implements StringBuilder {
|
||||
private decoder;
|
||||
private buffer;
|
||||
private bufferOffset;
|
||||
private string;
|
||||
byteLength: number;
|
||||
constructor(bufferSize: number);
|
||||
appendChar(char: number): void;
|
||||
appendBuf(buf: Uint8Array, start?: number, end?: number): void;
|
||||
private flushStringBuffer;
|
||||
reset(): void;
|
||||
toString(): string;
|
||||
}
|
||||
63
node_modules/@streamparser/json/dist/cjs/utils/bufferedString.js
generated
vendored
Normal file
@ -0,0 +1,63 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.BufferedString = exports.NonBufferedString = void 0;
|
||||
class NonBufferedString {
|
||||
constructor() {
|
||||
this.decoder = new TextDecoder("utf-8");
|
||||
this.string = "";
|
||||
this.byteLength = 0;
|
||||
}
|
||||
appendChar(char) {
|
||||
this.string += String.fromCharCode(char);
|
||||
this.byteLength += 1;
|
||||
}
|
||||
appendBuf(buf, start = 0, end = buf.length) {
|
||||
this.string += this.decoder.decode(buf.subarray(start, end));
|
||||
this.byteLength += end - start;
|
||||
}
|
||||
reset() {
|
||||
this.string = "";
|
||||
this.byteLength = 0;
|
||||
}
|
||||
toString() {
|
||||
return this.string;
|
||||
}
|
||||
}
|
||||
exports.NonBufferedString = NonBufferedString;
|
||||
class BufferedString {
|
||||
constructor(bufferSize) {
|
||||
this.decoder = new TextDecoder("utf-8");
|
||||
this.bufferOffset = 0;
|
||||
this.string = "";
|
||||
this.byteLength = 0;
|
||||
this.buffer = new Uint8Array(bufferSize);
|
||||
}
|
||||
appendChar(char) {
|
||||
if (this.bufferOffset >= this.buffer.length)
|
||||
this.flushStringBuffer();
|
||||
this.buffer[this.bufferOffset++] = char;
|
||||
this.byteLength += 1;
|
||||
}
|
||||
appendBuf(buf, start = 0, end = buf.length) {
|
||||
const size = end - start;
|
||||
if (this.bufferOffset + size > this.buffer.length)
|
||||
this.flushStringBuffer();
|
||||
this.buffer.set(buf.subarray(start, end), this.bufferOffset);
|
||||
this.bufferOffset += size;
|
||||
this.byteLength += size;
|
||||
}
|
||||
flushStringBuffer() {
|
||||
this.string += this.decoder.decode(this.buffer.subarray(0, this.bufferOffset));
|
||||
this.bufferOffset = 0;
|
||||
}
|
||||
reset() {
|
||||
this.string = "";
|
||||
this.bufferOffset = 0;
|
||||
this.byteLength = 0;
|
||||
}
|
||||
toString() {
|
||||
this.flushStringBuffer();
|
||||
return this.string;
|
||||
}
|
||||
}
|
||||
exports.BufferedString = BufferedString;
|
||||
14
node_modules/@streamparser/json/dist/cjs/utils/constants.d.ts
generated
vendored
Normal file
@ -0,0 +1,14 @@
|
||||
export declare enum TokenType {
|
||||
LEFT_BRACE = 1,
|
||||
RIGHT_BRACE = 2,
|
||||
LEFT_BRACKET = 3,
|
||||
RIGHT_BRACKET = 4,
|
||||
COLON = 5,
|
||||
COMMA = 6,
|
||||
TRUE = 7,
|
||||
FALSE = 8,
|
||||
NULL = 9,
|
||||
STRING = 10,
|
||||
NUMBER = 11,
|
||||
SEPARATOR = 12
|
||||
}
|
||||
18
node_modules/@streamparser/json/dist/cjs/utils/constants.js
generated
vendored
Normal file
@ -0,0 +1,18 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.TokenType = void 0;
|
||||
var TokenType;
|
||||
(function (TokenType) {
|
||||
TokenType[TokenType["LEFT_BRACE"] = 1] = "LEFT_BRACE";
|
||||
TokenType[TokenType["RIGHT_BRACE"] = 2] = "RIGHT_BRACE";
|
||||
TokenType[TokenType["LEFT_BRACKET"] = 3] = "LEFT_BRACKET";
|
||||
TokenType[TokenType["RIGHT_BRACKET"] = 4] = "RIGHT_BRACKET";
|
||||
TokenType[TokenType["COLON"] = 5] = "COLON";
|
||||
TokenType[TokenType["COMMA"] = 6] = "COMMA";
|
||||
TokenType[TokenType["TRUE"] = 7] = "TRUE";
|
||||
TokenType[TokenType["FALSE"] = 8] = "FALSE";
|
||||
TokenType[TokenType["NULL"] = 9] = "NULL";
|
||||
TokenType[TokenType["STRING"] = 10] = "STRING";
|
||||
TokenType[TokenType["NUMBER"] = 11] = "NUMBER";
|
||||
TokenType[TokenType["SEPARATOR"] = 12] = "SEPARATOR";
|
||||
})(TokenType = exports.TokenType || (exports.TokenType = {}));
|
||||
7
node_modules/@streamparser/json/dist/cjs/utils/types.d.ts
generated
vendored
Normal file
@ -0,0 +1,7 @@
|
||||
export declare type JsonPrimitive = string | number | boolean | null;
|
||||
export declare type JsonKey = string | number | undefined;
|
||||
export declare type JsonObject = {
|
||||
[key: string]: JsonPrimitive | JsonStruct;
|
||||
};
|
||||
export declare type JsonArray = (JsonPrimitive | JsonStruct)[];
|
||||
export declare type JsonStruct = JsonObject | JsonArray;
|
||||
2
node_modules/@streamparser/json/dist/cjs/utils/types.js
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
105
node_modules/@streamparser/json/dist/cjs/utils/utf-8.d.ts
generated
vendored
Normal file
@ -0,0 +1,105 @@
|
||||
export declare enum charset {
|
||||
BACKSPACE = 8,
|
||||
FORM_FEED = 12,
|
||||
NEWLINE = 10,
|
||||
CARRIAGE_RETURN = 13,
|
||||
TAB = 9,
|
||||
SPACE = 32,
|
||||
EXCLAMATION_MARK = 33,
|
||||
QUOTATION_MARK = 34,
|
||||
NUMBER_SIGN = 35,
|
||||
DOLLAR_SIGN = 36,
|
||||
PERCENT_SIGN = 37,
|
||||
AMPERSAND = 38,
|
||||
APOSTROPHE = 39,
|
||||
LEFT_PARENTHESIS = 40,
|
||||
RIGHT_PARENTHESIS = 41,
|
||||
ASTERISK = 42,
|
||||
PLUS_SIGN = 43,
|
||||
COMMA = 44,
|
||||
HYPHEN_MINUS = 45,
|
||||
FULL_STOP = 46,
|
||||
SOLIDUS = 47,
|
||||
DIGIT_ZERO = 48,
|
||||
DIGIT_ONE = 49,
|
||||
DIGIT_TWO = 50,
|
||||
DIGIT_THREE = 51,
|
||||
DIGIT_FOUR = 52,
|
||||
DIGIT_FIVE = 53,
|
||||
DIGIT_SIX = 54,
|
||||
DIGIT_SEVEN = 55,
|
||||
DIGIT_EIGHT = 56,
|
||||
DIGIT_NINE = 57,
|
||||
COLON = 58,
|
||||
SEMICOLON = 59,
|
||||
LESS_THAN_SIGN = 60,
|
||||
EQUALS_SIGN = 61,
|
||||
GREATER_THAN_SIGN = 62,
|
||||
QUESTION_MARK = 63,
|
||||
COMMERCIAL_AT = 64,
|
||||
LATIN_CAPITAL_LETTER_A = 65,
|
||||
LATIN_CAPITAL_LETTER_B = 66,
|
||||
LATIN_CAPITAL_LETTER_C = 67,
|
||||
LATIN_CAPITAL_LETTER_D = 68,
|
||||
LATIN_CAPITAL_LETTER_E = 69,
|
||||
LATIN_CAPITAL_LETTER_F = 70,
|
||||
LATIN_CAPITAL_LETTER_G = 71,
|
||||
LATIN_CAPITAL_LETTER_H = 72,
|
||||
LATIN_CAPITAL_LETTER_I = 73,
|
||||
LATIN_CAPITAL_LETTER_J = 74,
|
||||
LATIN_CAPITAL_LETTER_K = 75,
|
||||
LATIN_CAPITAL_LETTER_L = 76,
|
||||
LATIN_CAPITAL_LETTER_M = 77,
|
||||
LATIN_CAPITAL_LETTER_N = 78,
|
||||
LATIN_CAPITAL_LETTER_O = 79,
|
||||
LATIN_CAPITAL_LETTER_P = 80,
|
||||
LATIN_CAPITAL_LETTER_Q = 81,
|
||||
LATIN_CAPITAL_LETTER_R = 82,
|
||||
LATIN_CAPITAL_LETTER_S = 83,
|
||||
LATIN_CAPITAL_LETTER_T = 84,
|
||||
LATIN_CAPITAL_LETTER_U = 85,
|
||||
LATIN_CAPITAL_LETTER_V = 86,
|
||||
LATIN_CAPITAL_LETTER_W = 87,
|
||||
LATIN_CAPITAL_LETTER_X = 88,
|
||||
LATIN_CAPITAL_LETTER_Y = 89,
|
||||
LATIN_CAPITAL_LETTER_Z = 90,
|
||||
LEFT_SQUARE_BRACKET = 91,
|
||||
REVERSE_SOLIDUS = 92,
|
||||
RIGHT_SQUARE_BRACKET = 93,
|
||||
CIRCUMFLEX_ACCENT = 94,
|
||||
LOW_LINE = 95,
|
||||
GRAVE_ACCENT = 96,
|
||||
LATIN_SMALL_LETTER_A = 97,
|
||||
LATIN_SMALL_LETTER_B = 98,
|
||||
LATIN_SMALL_LETTER_C = 99,
|
||||
LATIN_SMALL_LETTER_D = 100,
|
||||
LATIN_SMALL_LETTER_E = 101,
|
||||
LATIN_SMALL_LETTER_F = 102,
|
||||
LATIN_SMALL_LETTER_G = 103,
|
||||
LATIN_SMALL_LETTER_H = 104,
|
||||
LATIN_SMALL_LETTER_I = 105,
|
||||
LATIN_SMALL_LETTER_J = 106,
|
||||
LATIN_SMALL_LETTER_K = 107,
|
||||
LATIN_SMALL_LETTER_L = 108,
|
||||
LATIN_SMALL_LETTER_M = 109,
|
||||
LATIN_SMALL_LETTER_N = 110,
|
||||
LATIN_SMALL_LETTER_O = 111,
|
||||
LATIN_SMALL_LETTER_P = 112,
|
||||
LATIN_SMALL_LETTER_Q = 113,
|
||||
LATIN_SMALL_LETTER_R = 114,
|
||||
LATIN_SMALL_LETTER_S = 115,
|
||||
LATIN_SMALL_LETTER_T = 116,
|
||||
LATIN_SMALL_LETTER_U = 117,
|
||||
LATIN_SMALL_LETTER_V = 118,
|
||||
LATIN_SMALL_LETTER_W = 119,
|
||||
LATIN_SMALL_LETTER_X = 120,
|
||||
LATIN_SMALL_LETTER_Y = 121,
|
||||
LATIN_SMALL_LETTER_Z = 122,
|
||||
LEFT_CURLY_BRACKET = 123,
|
||||
VERTICAL_LINE = 124,
|
||||
RIGHT_CURLY_BRACKET = 125,
|
||||
TILDE = 126
|
||||
}
|
||||
export declare const escapedSequences: {
|
||||
[key: number]: number;
|
||||
};
|
||||
116
node_modules/@streamparser/json/dist/cjs/utils/utf-8.js
generated
vendored
Normal file
@ -0,0 +1,116 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.escapedSequences = exports.charset = void 0;
|
||||
var charset;
|
||||
(function (charset) {
|
||||
charset[charset["BACKSPACE"] = 8] = "BACKSPACE";
|
||||
charset[charset["FORM_FEED"] = 12] = "FORM_FEED";
|
||||
charset[charset["NEWLINE"] = 10] = "NEWLINE";
|
||||
charset[charset["CARRIAGE_RETURN"] = 13] = "CARRIAGE_RETURN";
|
||||
charset[charset["TAB"] = 9] = "TAB";
|
||||
charset[charset["SPACE"] = 32] = "SPACE";
|
||||
charset[charset["EXCLAMATION_MARK"] = 33] = "EXCLAMATION_MARK";
|
||||
charset[charset["QUOTATION_MARK"] = 34] = "QUOTATION_MARK";
|
||||
charset[charset["NUMBER_SIGN"] = 35] = "NUMBER_SIGN";
|
||||
charset[charset["DOLLAR_SIGN"] = 36] = "DOLLAR_SIGN";
|
||||
charset[charset["PERCENT_SIGN"] = 37] = "PERCENT_SIGN";
|
||||
charset[charset["AMPERSAND"] = 38] = "AMPERSAND";
|
||||
charset[charset["APOSTROPHE"] = 39] = "APOSTROPHE";
|
||||
charset[charset["LEFT_PARENTHESIS"] = 40] = "LEFT_PARENTHESIS";
|
||||
charset[charset["RIGHT_PARENTHESIS"] = 41] = "RIGHT_PARENTHESIS";
|
||||
charset[charset["ASTERISK"] = 42] = "ASTERISK";
|
||||
charset[charset["PLUS_SIGN"] = 43] = "PLUS_SIGN";
|
||||
charset[charset["COMMA"] = 44] = "COMMA";
|
||||
charset[charset["HYPHEN_MINUS"] = 45] = "HYPHEN_MINUS";
|
||||
charset[charset["FULL_STOP"] = 46] = "FULL_STOP";
|
||||
charset[charset["SOLIDUS"] = 47] = "SOLIDUS";
|
||||
charset[charset["DIGIT_ZERO"] = 48] = "DIGIT_ZERO";
|
||||
charset[charset["DIGIT_ONE"] = 49] = "DIGIT_ONE";
|
||||
charset[charset["DIGIT_TWO"] = 50] = "DIGIT_TWO";
|
||||
charset[charset["DIGIT_THREE"] = 51] = "DIGIT_THREE";
|
||||
charset[charset["DIGIT_FOUR"] = 52] = "DIGIT_FOUR";
|
||||
charset[charset["DIGIT_FIVE"] = 53] = "DIGIT_FIVE";
|
||||
charset[charset["DIGIT_SIX"] = 54] = "DIGIT_SIX";
|
||||
charset[charset["DIGIT_SEVEN"] = 55] = "DIGIT_SEVEN";
|
||||
charset[charset["DIGIT_EIGHT"] = 56] = "DIGIT_EIGHT";
|
||||
charset[charset["DIGIT_NINE"] = 57] = "DIGIT_NINE";
|
||||
charset[charset["COLON"] = 58] = "COLON";
|
||||
charset[charset["SEMICOLON"] = 59] = "SEMICOLON";
|
||||
charset[charset["LESS_THAN_SIGN"] = 60] = "LESS_THAN_SIGN";
|
||||
charset[charset["EQUALS_SIGN"] = 61] = "EQUALS_SIGN";
|
||||
charset[charset["GREATER_THAN_SIGN"] = 62] = "GREATER_THAN_SIGN";
|
||||
charset[charset["QUESTION_MARK"] = 63] = "QUESTION_MARK";
|
||||
charset[charset["COMMERCIAL_AT"] = 64] = "COMMERCIAL_AT";
|
||||
charset[charset["LATIN_CAPITAL_LETTER_A"] = 65] = "LATIN_CAPITAL_LETTER_A";
|
||||
charset[charset["LATIN_CAPITAL_LETTER_B"] = 66] = "LATIN_CAPITAL_LETTER_B";
|
||||
charset[charset["LATIN_CAPITAL_LETTER_C"] = 67] = "LATIN_CAPITAL_LETTER_C";
|
||||
charset[charset["LATIN_CAPITAL_LETTER_D"] = 68] = "LATIN_CAPITAL_LETTER_D";
|
||||
charset[charset["LATIN_CAPITAL_LETTER_E"] = 69] = "LATIN_CAPITAL_LETTER_E";
|
||||
charset[charset["LATIN_CAPITAL_LETTER_F"] = 70] = "LATIN_CAPITAL_LETTER_F";
|
||||
charset[charset["LATIN_CAPITAL_LETTER_G"] = 71] = "LATIN_CAPITAL_LETTER_G";
|
||||
charset[charset["LATIN_CAPITAL_LETTER_H"] = 72] = "LATIN_CAPITAL_LETTER_H";
|
||||
charset[charset["LATIN_CAPITAL_LETTER_I"] = 73] = "LATIN_CAPITAL_LETTER_I";
|
||||
charset[charset["LATIN_CAPITAL_LETTER_J"] = 74] = "LATIN_CAPITAL_LETTER_J";
|
||||
charset[charset["LATIN_CAPITAL_LETTER_K"] = 75] = "LATIN_CAPITAL_LETTER_K";
|
||||
charset[charset["LATIN_CAPITAL_LETTER_L"] = 76] = "LATIN_CAPITAL_LETTER_L";
|
||||
charset[charset["LATIN_CAPITAL_LETTER_M"] = 77] = "LATIN_CAPITAL_LETTER_M";
|
||||
charset[charset["LATIN_CAPITAL_LETTER_N"] = 78] = "LATIN_CAPITAL_LETTER_N";
|
||||
charset[charset["LATIN_CAPITAL_LETTER_O"] = 79] = "LATIN_CAPITAL_LETTER_O";
|
||||
charset[charset["LATIN_CAPITAL_LETTER_P"] = 80] = "LATIN_CAPITAL_LETTER_P";
|
||||
charset[charset["LATIN_CAPITAL_LETTER_Q"] = 81] = "LATIN_CAPITAL_LETTER_Q";
|
||||
charset[charset["LATIN_CAPITAL_LETTER_R"] = 82] = "LATIN_CAPITAL_LETTER_R";
|
||||
charset[charset["LATIN_CAPITAL_LETTER_S"] = 83] = "LATIN_CAPITAL_LETTER_S";
|
||||
charset[charset["LATIN_CAPITAL_LETTER_T"] = 84] = "LATIN_CAPITAL_LETTER_T";
|
||||
charset[charset["LATIN_CAPITAL_LETTER_U"] = 85] = "LATIN_CAPITAL_LETTER_U";
|
||||
charset[charset["LATIN_CAPITAL_LETTER_V"] = 86] = "LATIN_CAPITAL_LETTER_V";
|
||||
charset[charset["LATIN_CAPITAL_LETTER_W"] = 87] = "LATIN_CAPITAL_LETTER_W";
|
||||
charset[charset["LATIN_CAPITAL_LETTER_X"] = 88] = "LATIN_CAPITAL_LETTER_X";
|
||||
charset[charset["LATIN_CAPITAL_LETTER_Y"] = 89] = "LATIN_CAPITAL_LETTER_Y";
|
||||
charset[charset["LATIN_CAPITAL_LETTER_Z"] = 90] = "LATIN_CAPITAL_LETTER_Z";
|
||||
charset[charset["LEFT_SQUARE_BRACKET"] = 91] = "LEFT_SQUARE_BRACKET";
|
||||
charset[charset["REVERSE_SOLIDUS"] = 92] = "REVERSE_SOLIDUS";
|
||||
charset[charset["RIGHT_SQUARE_BRACKET"] = 93] = "RIGHT_SQUARE_BRACKET";
|
||||
charset[charset["CIRCUMFLEX_ACCENT"] = 94] = "CIRCUMFLEX_ACCENT";
|
||||
charset[charset["LOW_LINE"] = 95] = "LOW_LINE";
|
||||
charset[charset["GRAVE_ACCENT"] = 96] = "GRAVE_ACCENT";
|
||||
charset[charset["LATIN_SMALL_LETTER_A"] = 97] = "LATIN_SMALL_LETTER_A";
|
||||
charset[charset["LATIN_SMALL_LETTER_B"] = 98] = "LATIN_SMALL_LETTER_B";
|
||||
charset[charset["LATIN_SMALL_LETTER_C"] = 99] = "LATIN_SMALL_LETTER_C";
|
||||
charset[charset["LATIN_SMALL_LETTER_D"] = 100] = "LATIN_SMALL_LETTER_D";
|
||||
charset[charset["LATIN_SMALL_LETTER_E"] = 101] = "LATIN_SMALL_LETTER_E";
|
||||
charset[charset["LATIN_SMALL_LETTER_F"] = 102] = "LATIN_SMALL_LETTER_F";
|
||||
charset[charset["LATIN_SMALL_LETTER_G"] = 103] = "LATIN_SMALL_LETTER_G";
|
||||
charset[charset["LATIN_SMALL_LETTER_H"] = 104] = "LATIN_SMALL_LETTER_H";
|
||||
charset[charset["LATIN_SMALL_LETTER_I"] = 105] = "LATIN_SMALL_LETTER_I";
|
||||
charset[charset["LATIN_SMALL_LETTER_J"] = 106] = "LATIN_SMALL_LETTER_J";
|
||||
charset[charset["LATIN_SMALL_LETTER_K"] = 107] = "LATIN_SMALL_LETTER_K";
|
||||
charset[charset["LATIN_SMALL_LETTER_L"] = 108] = "LATIN_SMALL_LETTER_L";
|
||||
charset[charset["LATIN_SMALL_LETTER_M"] = 109] = "LATIN_SMALL_LETTER_M";
|
||||
charset[charset["LATIN_SMALL_LETTER_N"] = 110] = "LATIN_SMALL_LETTER_N";
|
||||
charset[charset["LATIN_SMALL_LETTER_O"] = 111] = "LATIN_SMALL_LETTER_O";
|
||||
charset[charset["LATIN_SMALL_LETTER_P"] = 112] = "LATIN_SMALL_LETTER_P";
|
||||
charset[charset["LATIN_SMALL_LETTER_Q"] = 113] = "LATIN_SMALL_LETTER_Q";
|
||||
charset[charset["LATIN_SMALL_LETTER_R"] = 114] = "LATIN_SMALL_LETTER_R";
|
||||
charset[charset["LATIN_SMALL_LETTER_S"] = 115] = "LATIN_SMALL_LETTER_S";
|
||||
charset[charset["LATIN_SMALL_LETTER_T"] = 116] = "LATIN_SMALL_LETTER_T";
|
||||
charset[charset["LATIN_SMALL_LETTER_U"] = 117] = "LATIN_SMALL_LETTER_U";
|
||||
charset[charset["LATIN_SMALL_LETTER_V"] = 118] = "LATIN_SMALL_LETTER_V";
|
||||
charset[charset["LATIN_SMALL_LETTER_W"] = 119] = "LATIN_SMALL_LETTER_W";
|
||||
charset[charset["LATIN_SMALL_LETTER_X"] = 120] = "LATIN_SMALL_LETTER_X";
|
||||
charset[charset["LATIN_SMALL_LETTER_Y"] = 121] = "LATIN_SMALL_LETTER_Y";
|
||||
charset[charset["LATIN_SMALL_LETTER_Z"] = 122] = "LATIN_SMALL_LETTER_Z";
|
||||
charset[charset["LEFT_CURLY_BRACKET"] = 123] = "LEFT_CURLY_BRACKET";
|
||||
charset[charset["VERTICAL_LINE"] = 124] = "VERTICAL_LINE";
|
||||
charset[charset["RIGHT_CURLY_BRACKET"] = 125] = "RIGHT_CURLY_BRACKET";
|
||||
charset[charset["TILDE"] = 126] = "TILDE";
|
||||
})(charset = exports.charset || (exports.charset = {}));
|
||||
exports.escapedSequences = {
|
||||
[charset.QUOTATION_MARK]: charset.QUOTATION_MARK,
|
||||
[charset.REVERSE_SOLIDUS]: charset.REVERSE_SOLIDUS,
|
||||
[charset.SOLIDUS]: charset.SOLIDUS,
|
||||
[charset.LATIN_SMALL_LETTER_B]: charset.BACKSPACE,
|
||||
[charset.LATIN_SMALL_LETTER_F]: charset.FORM_FEED,
|
||||
[charset.LATIN_SMALL_LETTER_N]: charset.NEWLINE,
|
||||
[charset.LATIN_SMALL_LETTER_R]: charset.CARRIAGE_RETURN,
|
||||
[charset.LATIN_SMALL_LETTER_T]: charset.TAB,
|
||||
};
|
||||
329
node_modules/@streamparser/json/dist/deno/README.md
generated
vendored
Normal file
@ -0,0 +1,329 @@
|
||||
# @streamparser/json
|
||||
|
||||
Fast dependency-free library to parse a JSON stream using UTF-8 encoding in Node.js, Deno or any modern browser. Fully compliant with the JSON spec and `JSON.parse(...)`.
|
||||
|
||||
*tldr;*
|
||||
|
||||
```javascript
|
||||
import JSONparser from 'https://deno.land/x/streamparser_json@v0.0.3/jsonparser.ts';
|
||||
|
||||
const parser = new JSONparser();
|
||||
parser.onValue = (value) => { /* process data */}
|
||||
|
||||
// Or passing the stream in several chunks
|
||||
try {
|
||||
parser.write('{ "test": ["a"] }');
|
||||
// onValue will be called 3 times:
|
||||
// "a"
|
||||
// ["a"]
|
||||
// { test: ["a"] }
|
||||
} catch (err) {
|
||||
console.log(err); // handle errors
|
||||
}
|
||||
```
|
||||
|
||||
## Dependencies / Polyfilling
|
||||
|
||||
@streamparser/json requires a few ES6 classes:
|
||||
|
||||
* [Uint8Array](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array)
|
||||
* [TextEncoder](https://developer.mozilla.org/en-US/docs/Web/API/TextEncoder)
|
||||
* [TextDecoder](https://developer.mozilla.org/en-US/docs/Web/API/TextDecoder)
|
||||
|
||||
If you are targeting browsers or systems in which these might be missing, you need to polyfill them.
|
||||
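As a minimal sketch (not part of the library itself), a runtime check along these lines can tell you whether polyfills are needed before instantiating the parser; how the polyfills are loaded depends on your bundler or runtime:

```javascript
// Minimal sketch: verify the required globals exist before using the parser.
const missing = ['Uint8Array', 'TextEncoder', 'TextDecoder'].filter(
  (name) => typeof globalThis[name] === 'undefined'
);

if (missing.length > 0) {
  console.warn(`Polyfills needed for: ${missing.join(', ')}`);
}
```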
|
||||
## Components
|
||||
|
||||
### Tokenizer
|
||||
|
||||
A JSON-compliant tokenizer that parses a UTF-8 stream into JSON tokens.
|
||||
|
||||
```javascript
|
||||
import Tokenizer from 'https://deno.land/x/streamparser_json@v0.0.3/tokenizer.ts';
|
||||
|
||||
const tokenizer = new Tokenizer(opts);
|
||||
```
|
||||
|
||||
The available options are:
|
||||
|
||||
```javascript
|
||||
{
|
||||
stringBufferSize: <bufferSize>, // set to 0 to disable buffering. Minimum valid value is 4.
|
||||
numberBufferSize: <bufferSize>, // set to 0 to disable buffering
|
||||
separator: <string>, // separator between objects. For example, `\n` for NDJSON.
|
||||
}
|
||||
```
|
||||
|
||||
If buffer sizes are set to anything other than zero, instead of using a string to append the data as it comes in, the data is buffered using a TypedArray. A reasonable size could be `64 * 1024` (64 KB).
|
||||
|
||||
#### Buffering
|
||||
|
||||
When parsing strings or numbers, the parser needs to gather the data in-memory until the whole value is ready.
|
||||
|
||||
Strings are immutable in JavaScript, so every string operation creates a new string. The V8 engine, which powers Node, Deno and most modern browsers, performs many different types of optimization. One of these optimizations is to over-allocate memory when it detects many string concatenations. This significantly increases memory consumption and can easily exhaust your memory when parsing JSON containing very large strings or numbers. For those cases, the parser can buffer the characters using a TypedArray. This requires encoding/decoding from/to the buffer into an actual string once the value is ready, which is done using the `TextEncoder` and `TextDecoder` APIs. Unfortunately, these APIs create significant overhead when the strings are small, so they should be used only when strictly necessary.
|
||||
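For illustration, the sketch below enables buffered mode for strings using the documented `stringBufferSize` option (passed here through `JSONParser`, which forwards tokenizer options):

```javascript
import { JSONParser } from '@streamparser/json';

// Buffer string data in a 64 KB TypedArray instead of concatenating JS strings.
// Useful when the incoming JSON contains very large string values.
const parser = new JSONParser({
  stringBufferSize: 64 * 1024, // buffered strings (minimum valid value is 4)
  numberBufferSize: 0,         // numbers keep using a plain (non-buffered) string
});
parser.onValue = (value) => { /* process data */ };
```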
|
||||
#### Properties & Methods
|
||||
|
||||
* **write(data: string|typedArray|buffer)** push data into the tokenizer.
|
||||
* **end()** closes the tokenizer so it can not be used anymore. Throws an error if the tokenizer was in the middle of parsing.
|
||||
* **isEnded** readonly boolean property indicating whether the Tokenizer is ended or is still accepting data.
|
||||
* **parseNumber(numberStr)** method used internally to parse numbers. By default, it is equivalent to `Number(numberStr)` but the user can override it if they want different behaviour.
|
||||
* **onToken(token: TokenType, value: any, offset: number)** no-op method that the user should override to follow the tokenization process.
|
||||
* **onError(err: Error)** no-op method that the user can override to act on errors. If not set, the write method simply throws synchronously.
|
||||
* **onEnd()** no-op method that the user can override to act when the tokenizer is ended.
|
||||
|
||||
```javascript
|
||||
// You can override the overridable methods by creating your own class extending Tokenizer
|
||||
class MyTokenizer extends Tokenizer {
|
||||
parseNumber(numberStr) {
|
||||
const number = super.parseNumber(numberStr);
|
||||
// If the number is too large to be represented exactly, keep the original string.
|
||||
return Number.isFinite(number) ? number : numberStr;
|
||||
}
|
||||
onToken(token: TokenType, value: any) {
|
||||
if (token === TokenType.NUMBER && typeof value === 'string') {
|
||||
super.onToken(TokenType.STRING, value);
|
||||
} else {
|
||||
super.onToken(token, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const myTokenizer = new MyTokenizer();
|
||||
|
||||
// or just overriding it
|
||||
const tokenizer = new Tokenizer();
|
||||
tokenizer.parseNumber = (numberStr) => { ... };
|
||||
tokenizer.onToken = (token, value, offset) => { ... };
|
||||
```
|
||||
|
||||
### TokenParser
|
||||
|
||||
A token parser that processes JSON tokens as emitted by the `Tokenizer` and emits JSON values/objects.
|
||||
|
||||
```javascript
|
||||
import { TokenParser } from '@streamparser/json';
|
||||
|
||||
const tokenParser = new TokenParser(opts);
|
||||
```
|
||||
|
||||
The available options are:
|
||||
|
||||
```javascript
|
||||
{
|
||||
paths: <string[]>,
|
||||
keepStack: <boolean>, // whether to keep all the properties in the stack
|
||||
separator: <string>, // separator between objects. For example, `\n` for NDJSON. If left undefined, the token parser will end after parsing the first object. To parse multiple objects without any delimiter, set it to the empty string `''`.
|
||||
}
|
||||
```
|
||||
|
||||
* paths: Array of paths to emit. Defaults to `undefined`, which emits everything. The paths are intended to support JSONPath, although at the moment only the root object selector (`$`) and subproperty selectors, including wildcards, are supported (`$.a`, `$.*`, `$.a.b`, `$.*.b`, etc). See the sketch after this list.
|
||||
* keepStack: Whether to keep full objects on the stack even if they won't be emitted. Defaults to `true`. When set to `false`, already-processed properties are deleted from the parent object when no ancestor of the current value will be emitted. This means that the parent object passed to the `onValue` function may be empty, which doesn't reflect the actual data, but it's more memory-efficient.
|
||||
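A small sketch of the `paths` selectors described above, assuming a hypothetical payload with a top-level `items` array:

```javascript
import { JSONParser } from '@streamparser/json';

// Emit only the elements of the top-level "items" array, not the root object.
const parser = new JSONParser({ paths: ['$.items.*'] });
parser.onValue = (value, key, parent, stack) => {
  // Called once per element of "items"; key is the array index.
  console.log(key, value);
};

parser.write('{ "items": [{ "id": 1 }, { "id": 2 }], "total": 2 }');
```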
|
||||
#### Properties & Methods
|
||||
|
||||
* **write(token: TokenType, value: any)** push data into the token parser.
|
||||
* **end()** closes the token parser so it can not be used anymore. Throws an error if the tokenizer was in the middle of parsing.
|
||||
* **isEnded** readonly boolean property indicating whether the token parser is ended or is still accepting data.
|
||||
* **onValue(value: any)** no-op method that the user should override to get the parsed value.
|
||||
* **onError(err: Error)** no-op method that the user should override to act on errors. If not set, the write method simply throws synchronously.
|
||||
* **onEnd()** no-op method that the user should override to act when the token parser is ended.
|
||||
|
||||
```javascript
|
||||
// You can override the overridable methods by creating your own class extending TokenParser
|
||||
class MyTokenParser extends TokenParser {
|
||||
onValue(value: any) {
|
||||
// ...
|
||||
}
|
||||
}
|
||||
|
||||
const myTokenParser = new MyTokenParser();
|
||||
|
||||
// or just overriding it
|
||||
const tokenParser = new TokenParser();
|
||||
tokenParser.onValue = (value) => { ... };
|
||||
```
|
||||
|
||||
### JSONParser
|
||||
|
||||
A drop-in replacement for `JSONparse` (with a few ~~breaking changes~~ improvements; see below).
|
||||
|
||||
|
||||
```javascript
|
||||
import { JSONParser } from '@streamparser/json';
|
||||
|
||||
const parser = new JSONParser();
|
||||
```
|
||||
|
||||
It takes the same options as the tokenizer and the token parser.
|
||||
|
||||
This class is just for convenience. In reality, it simply connects the tokenizer and the parser:
|
||||
|
||||
```javascript
|
||||
const tokenizer = new Tokenizer(opts);
|
||||
const tokenParser = new TokenParser();
|
||||
tokenizer.onToken = tokenParser.write.bind(tokenParser);
|
||||
tokenParser.onValue = (value) => { /* Process values */ }
|
||||
```
|
||||
|
||||
#### Properties & Methods
|
||||
|
||||
* **write(data: string|typedArray|buffer)** alias to the Tokenizer write method.
|
||||
* **end()** alias to the Tokenizer end method.
|
||||
* **isEnded** readonly boolean property indicating whether the JSONparser is ended or is still accepting data.
|
||||
* **onToken(token: TokenType, value: any, offset: number)** alias to the Tokenizer onToken method. (write only).
|
||||
* **onValue(value: any)** alias to the Token Parser onValue method (write only).
|
||||
* **onError(err: Error)** alias to the Tokenizer/Token Parser onError method (write only).
|
||||
* **onEnd()** alias to the Token Parser onEnd method (which also ends the Tokenizer if needed) (write only).
|
||||
|
||||
```javascript
|
||||
// You can override the overridable methods by creating your own class extending JSONParser
|
||||
class MyJsonParser extends JSONParser {
|
||||
onToken(token: TokenType, value: any, offset: number) {
|
||||
// ...
|
||||
}
|
||||
onValue(value: any) {
|
||||
// ...
|
||||
}
|
||||
}
|
||||
|
||||
const myJsonParser = new MyJsonParser();
|
||||
|
||||
// or just overriding it
|
||||
const jsonParser = new JSONParser();
|
||||
jsonParser.onToken = (token, value, offset) => { ... };
|
||||
jsonParser.onValue = (value) => { ... };
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
You can use both components independently:
|
||||
|
||||
```javascript
|
||||
const tokenizer = new Tokenizer(opts);
|
||||
const tokenParser = new TokenParser();
|
||||
tokenizer.onToken = tokenParser.write.bind(tokenParser);
|
||||
```
|
||||
|
||||
You push data using the `write` method which takes a string or an array-like object.
|
||||
|
||||
You can subscribe to the resulting data using the `onValue` callback:
|
||||
|
||||
```javascript
|
||||
import { JSONParser } from '@streamparser/json';
|
||||
|
||||
const parser = new JSONParser({ stringBufferSize: undefined, paths: ['$'] });
|
||||
parser.onValue = console.log;
|
||||
|
||||
parser.write('"Hello world!"'); // logs "Hello world!"
|
||||
|
||||
// Or passing the stream in several chunks
|
||||
parser.write('"');
|
||||
parser.write('Hello');
|
||||
parser.write(' ');
|
||||
parser.write('world!');
|
||||
parser.write('"');// logs "Hello world!"
|
||||
```
|
||||
|
||||
Write is always a synchronous operation so any error during the parsing of the stream will be thrown during the write operation. After an error, the parser can't continue parsing.
|
||||
|
||||
```javascript
|
||||
import { JSONParser } from '@streamparser/json';
|
||||
|
||||
const parser = new JSONParser({ stringBufferSize: undefined });
|
||||
parser.onValue = console.log;
|
||||
|
||||
try {
|
||||
parser.write('"""');
|
||||
} catch (err) {
|
||||
console.log(err); // logs the parsing error
|
||||
}
|
||||
```
|
||||
|
||||
You can also handle errors using callbacks:
|
||||
|
||||
```javascript
|
||||
import { JSONParser } from '@streamparser/json';
|
||||
|
||||
const parser = new JSONParser({ stringBufferSize: undefined });
|
||||
parser.onValue = console.log;
|
||||
parser.onError = console.error;
|
||||
|
||||
parser.write('"""');
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
### Stream-parsing a fetch request returning a JSON stream
|
||||
|
||||
Imagine an endpoint that sends a large number of JSON objects one after the other (`{"id":1}{"id":2}{"id":3}...`).
|
||||
|
||||
```js
|
||||
import { JSONParser } from '@streamparser/json';
|
||||
|
||||
const jsonparser = new JSONParser();
|
||||
jsonparser.onValue = (value, key, parent, stack) => {
|
||||
if (stack.length > 0) return; // ignore inner values
|
||||
// TODO process element
|
||||
}
|
||||
|
||||
const response = await fetch('http://example.com/');
|
||||
const reader = response.body.getReader();
|
||||
while(true) {
|
||||
const { done, value } = await reader.read();
|
||||
if (done) break;
|
||||
jsonparser.write(value);
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
### Stream-parsing a fetch request returning a JSON array
|
||||
|
||||
Imagine an endpoint that sends a large JSON array of objects (`[{"id":1},{"id":2},{"id":3},...]`).
|
||||
|
||||
```js
|
||||
import { JSONParser } from '@streamparser/json';
|
||||
|
||||
const jsonparser = new JSONParser({ stringBufferSize: undefined, paths: ['$.*'] });
|
||||
jsonparser.onValue = (value, key, parent, stack) => {
|
||||
if (stack.length === 0) /* We are done. Exit. */;
|
||||
// By default, the parser keeps all the child elements in memory until the root parent is emitted.
|
||||
// Let's delete the objects after processing them in order to optimize memory.
|
||||
delete parent[key];
|
||||
// TODO process `value` which will be each of the values in the array.
|
||||
}
|
||||
|
||||
const response = await fetch('http://example.com/');
|
||||
const reader = response.body.getReader();
|
||||
while(true) {
|
||||
const { done, value } = await reader.read();
|
||||
if (done) break;
|
||||
jsonparser.write(value);
|
||||
}
|
||||
```
|
||||
|
||||
## Why build this if we have JSONparse?
|
||||
|
||||
JSONparse was awesome... in 2011.
|
||||
|
||||
@streamparser/json strengths include:
|
||||
|
||||
* As performant as the original and even faster in some cases.
|
||||
* Works on the browser.
|
||||
* Allows selecting what to emit.
|
||||
* Well documented.
|
||||
* Better designed and more pluggable/configurable by clearly separating the tokenizer and token parser processes.
|
||||
* Simpler and cleaner code. Uses ES6 and doesn't rely on deprecated Node.js methods.
|
||||
* 100% unit test coverage.
|
||||
* Fully compliant with the JSON spec. You will always get the same result as using `JSON.parse()`.
|
||||
|
||||
|
||||
### ~~Breaking changes~~ Improvements compared to JSONparse
|
||||
|
||||
* JSONparse keeps big numbers as strings, which is not compliant with the spec. With @streamparser/json you can achieve such behaviour by simply overriding the `parseNumber` method.
|
||||
* JSONparse errors on characters above 244, which is not compliant with the spec. @streamparser/json parses them correctly.
|
||||
* JSONparse incorrectly allows trailing commas in objects or arrays, which is not compliant with the spec. @streamparser/json does not.
|
||||
* JSONparse uses the `onError` callback to handle errors. Since the `write` method is synchronous, @streamparser/json defaults to throwing on error, so wrapping the write operation in a try-catch block captures all possible errors. If the `onError` callback is set, nothing is thrown.
|
||||
* JSONparse uses buffers to parse strings to avoid memory exhaustion if your JSON includes very long strings (due to V8 optimizations). This has a performance impact and is not necessary for most use cases. @streamparser/json uses a string as its internal buffer by default to improve performance and allows the user to get the exact same behaviour as JSONparse by setting the `stringBufferSize` option to `64 * 1024`.
|
||||
* JSONparse parses all valid JSON objects that come through the stream and doesn't support ending the processing. @streamparser/json ends the processing after a single object unless the user explicitly configures a `separator`. When using a separator, the user can end the processing by calling the `end` method, which will end the processing and throw an error if the stream is in the middle of parsing something, i.e. the JSON passed so far was incomplete/incorrect. Users can use the `onEnd` callback to act when the processing ends (see the NDJSON sketch after this list).
|
||||
* JSONparse will not emit a number until it is followed by a non-numeric character, i.e. it will not parse a standalone number, which is valid JSON. @streamparser/json uses the `end` method to emit any number that was still being parsed before completely ending the processing.
|
||||
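As a rough sketch of the separator/`end()` behaviour described above, parsing newline-delimited JSON and finishing explicitly:

```javascript
import { JSONParser } from '@streamparser/json';

// Newline-delimited JSON: each line is a complete JSON value.
const parser = new JSONParser({ separator: '\n' });
parser.onValue = (value, key, parent, stack) => {
  if (stack.length > 0) return; // only act on completed root-level values
  console.log(value);
};
parser.onEnd = () => console.log('done');

parser.write('{"id":1}\n{"id":2}\n');
parser.end(); // flushes and finishes; throws if a value was left half-parsed
```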
5
node_modules/@streamparser/json/dist/deno/index.ts
generated
vendored
Normal file
@ -0,0 +1,5 @@
|
||||
export { default as Tokenizer } from './tokenizer.ts';
|
||||
export { default as TokenParser } from './tokenparser.ts';
|
||||
export { default as JSONParser } from './jsonparser.ts';
|
||||
export * as utf8 from './utils/utf-8.ts';
|
||||
export { TokenType } from './utils/constants.ts';
|
||||
65
node_modules/@streamparser/json/dist/deno/jsonparser.ts
generated
vendored
Normal file
@ -0,0 +1,65 @@
|
||||
import Tokenizer, { TokenizerOptions } from './tokenizer.ts';
|
||||
import TokenParser, { StackElement, TokenParserOptions } from './tokenparser.ts';
|
||||
import { JsonPrimitive, JsonKey, JsonStruct } from './utils/types.ts';
|
||||
|
||||
interface JSONParserOpts extends TokenizerOptions, TokenParserOptions {}
|
||||
|
||||
export default class JSONParser {
|
||||
private tokenizer: Tokenizer;
|
||||
private tokenParser: TokenParser;
|
||||
|
||||
constructor(opts: JSONParserOpts = {}) {
|
||||
this.tokenizer = new Tokenizer(opts);
|
||||
this.tokenParser = new TokenParser(opts);
|
||||
|
||||
this.tokenizer.onToken = this.tokenParser.write.bind(this.tokenParser);
|
||||
this.tokenizer.onEnd = () => {
|
||||
if (!this.tokenParser.isEnded) this.tokenParser.end();
|
||||
};
|
||||
|
||||
this.tokenParser.onError = this.tokenizer.error.bind(this.tokenizer);
|
||||
this.tokenParser.onEnd = () => {
|
||||
if (!this.tokenizer.isEnded) this.tokenizer.end();
|
||||
};
|
||||
}
|
||||
|
||||
public get isEnded(): boolean {
|
||||
return this.tokenizer.isEnded && this.tokenParser.isEnded;
|
||||
}
|
||||
|
||||
public write(input: Iterable<number> | string): void {
|
||||
this.tokenizer.write(input);
|
||||
}
|
||||
|
||||
public end(): void {
|
||||
this.tokenizer.end();
|
||||
}
|
||||
|
||||
public set onToken(
|
||||
cb: (token: number, value: JsonPrimitive, offset: number) => void
|
||||
) {
|
||||
this.tokenizer.onToken = cb;
|
||||
}
|
||||
|
||||
public set onValue(
|
||||
cb: (
|
||||
value: JsonPrimitive | JsonStruct,
|
||||
key: JsonKey | undefined,
|
||||
parent: JsonStruct | undefined,
|
||||
stack: StackElement[]
|
||||
) => void
|
||||
) {
|
||||
this.tokenParser.onValue = cb;
|
||||
}
|
||||
|
||||
public set onError(cb: (err: Error) => void) {
|
||||
this.tokenizer.onError = cb;
|
||||
}
|
||||
|
||||
public set onEnd(cb: () => void) {
|
||||
this.tokenParser.onEnd = () => {
|
||||
if (!this.tokenizer.isEnded) this.tokenizer.end();
|
||||
cb.call(this.tokenParser);
|
||||
};
|
||||
}
|
||||
}
|
||||
678
node_modules/@streamparser/json/dist/deno/tokenizer.ts
generated
vendored
Normal file
@ -0,0 +1,678 @@
|
||||
import { charset, escapedSequences } from './utils/utf-8.ts';
|
||||
import {
|
||||
StringBuilder,
|
||||
NonBufferedString,
|
||||
BufferedString,
|
||||
} from './utils/bufferedString.ts';
|
||||
import { TokenType } from './utils/constants.ts';
|
||||
import { JsonPrimitive } from './utils/types.ts';
|
||||
|
||||
const {
|
||||
LEFT_BRACE,
|
||||
RIGHT_BRACE,
|
||||
LEFT_BRACKET,
|
||||
RIGHT_BRACKET,
|
||||
COLON,
|
||||
COMMA,
|
||||
TRUE,
|
||||
FALSE,
|
||||
NULL,
|
||||
STRING,
|
||||
NUMBER,
|
||||
} = TokenType;
|
||||
|
||||
// Tokenizer States
|
||||
enum TokenizerStates {
|
||||
START,
|
||||
ENDED,
|
||||
ERROR,
|
||||
TRUE1,
|
||||
TRUE2,
|
||||
TRUE3,
|
||||
FALSE1,
|
||||
FALSE2,
|
||||
FALSE3,
|
||||
FALSE4,
|
||||
NULL1,
|
||||
NULL2,
|
||||
NULL3,
|
||||
STRING_DEFAULT,
|
||||
STRING_AFTER_BACKSLASH,
|
||||
STRING_UNICODE_DIGIT_1,
|
||||
STRING_UNICODE_DIGIT_2,
|
||||
STRING_UNICODE_DIGIT_3,
|
||||
STRING_UNICODE_DIGIT_4,
|
||||
STRING_INCOMPLETE_CHAR,
|
||||
NUMBER_AFTER_INITIAL_MINUS,
|
||||
NUMBER_AFTER_INITIAL_ZERO,
|
||||
NUMBER_AFTER_INITIAL_NON_ZERO,
|
||||
NUMBER_AFTER_FULL_STOP,
|
||||
NUMBER_AFTER_DECIMAL,
|
||||
NUMBER_AFTER_E,
|
||||
NUMBER_AFTER_E_AND_SIGN,
|
||||
NUMBER_AFTER_E_AND_DIGIT,
|
||||
SEPARATOR,
|
||||
}
|
||||
|
||||
export interface TokenizerOptions {
|
||||
stringBufferSize?: number;
|
||||
numberBufferSize?: number;
|
||||
separator?: string;
|
||||
}
|
||||
|
||||
const defaultOpts: TokenizerOptions = {
|
||||
stringBufferSize: 0,
|
||||
numberBufferSize: 0,
|
||||
separator: undefined,
|
||||
};
|
||||
|
||||
export class TokenizerError extends Error {
|
||||
constructor(message: string) {
|
||||
super(message);
|
||||
// Typescript is broken. This is a workaround
|
||||
Object.setPrototypeOf(this, TokenizerError.prototype);
|
||||
}
|
||||
}
|
||||
|
||||
export default class Tokenizer {
|
||||
private state = TokenizerStates.START;
|
||||
|
||||
private separator?: string;
|
||||
private separatorBytes?: Uint8Array;
|
||||
private separatorIndex = 0;
|
||||
private bufferedString: StringBuilder;
|
||||
private bufferedNumber: StringBuilder;
|
||||
|
||||
private unicode: string | undefined = undefined; // unicode escapes
|
||||
private highSurrogate: number | undefined = undefined;
|
||||
private bytes_remaining = 0; // number of bytes remaining in multi byte utf8 char to read after split boundary
|
||||
private bytes_in_sequence = 0; // bytes in multi byte utf8 char to read
|
||||
private char_split_buffer = new Uint8Array(4); // for rebuilding chars split before boundary is reached
|
||||
private encoder = new TextEncoder();
|
||||
private offset = -1;
|
||||
|
||||
constructor(opts?: TokenizerOptions) {
|
||||
opts = { ...defaultOpts, ...opts };
|
||||
|
||||
this.bufferedString =
|
||||
opts.stringBufferSize && opts.stringBufferSize > 4
|
||||
? new BufferedString(opts.stringBufferSize)
|
||||
: new NonBufferedString();
|
||||
this.bufferedNumber =
|
||||
opts.numberBufferSize && opts.numberBufferSize > 0
|
||||
? new BufferedString(opts.numberBufferSize)
|
||||
: new NonBufferedString();
|
||||
|
||||
this.separator = opts.separator;
|
||||
this.separatorBytes = opts.separator
|
||||
? this.encoder.encode(opts.separator)
|
||||
: undefined;
|
||||
}
|
||||
|
||||
public get isEnded(): boolean {
|
||||
return this.state === TokenizerStates.ENDED;
|
||||
}
|
||||
|
||||
public write(input: Iterable<number> | string): void {
|
||||
let buffer: Uint8Array;
|
||||
if (input instanceof Uint8Array) {
|
||||
buffer = input;
|
||||
} else if (typeof input === "string") {
|
||||
buffer = this.encoder.encode(input);
|
||||
} else if (
|
||||
(typeof input === "object" && "buffer" in input) ||
|
||||
Array.isArray(input)
|
||||
) {
|
||||
buffer = Uint8Array.from(input);
|
||||
} else {
|
||||
this.error(
|
||||
new TypeError(
|
||||
"Unexpected type. The `write` function only accepts Arrays, TypedArrays and Strings."
|
||||
)
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
for (let i = 0; i < buffer.length; i += 1) {
|
||||
const n = buffer[i]; // get current byte from buffer
|
||||
switch (this.state) {
|
||||
case TokenizerStates.START:
|
||||
this.offset += 1;
|
||||
|
||||
if (this.separatorBytes && n === this.separatorBytes[0]) {
|
||||
if (this.separatorBytes.length === 1) {
|
||||
this.state = TokenizerStates.START;
|
||||
this.onToken(
|
||||
TokenType.SEPARATOR,
|
||||
this.separator as string,
|
||||
this.offset + this.separatorBytes.length - 1
|
||||
);
|
||||
continue;
|
||||
}
|
||||
this.state = TokenizerStates.SEPARATOR;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (
|
||||
n === charset.SPACE ||
|
||||
n === charset.NEWLINE ||
|
||||
n === charset.CARRIAGE_RETURN ||
|
||||
n === charset.TAB
|
||||
) {
|
||||
// whitespace
|
||||
continue;
|
||||
}
|
||||
|
||||
if (n === charset.LEFT_CURLY_BRACKET) {
|
||||
this.onToken(LEFT_BRACE, "{", this.offset);
|
||||
continue;
|
||||
}
|
||||
if (n === charset.RIGHT_CURLY_BRACKET) {
|
||||
this.onToken(RIGHT_BRACE, "}", this.offset);
|
||||
continue;
|
||||
}
|
||||
if (n === charset.LEFT_SQUARE_BRACKET) {
|
||||
this.onToken(LEFT_BRACKET, "[", this.offset);
|
||||
continue;
|
||||
}
|
||||
if (n === charset.RIGHT_SQUARE_BRACKET) {
|
||||
this.onToken(RIGHT_BRACKET, "]", this.offset);
|
||||
continue;
|
||||
}
|
||||
if (n === charset.COLON) {
|
||||
this.onToken(COLON, ":", this.offset);
|
||||
continue;
|
||||
}
|
||||
if (n === charset.COMMA) {
|
||||
this.onToken(COMMA, ",", this.offset);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (n === charset.LATIN_SMALL_LETTER_T) {
|
||||
this.state = TokenizerStates.TRUE1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (n === charset.LATIN_SMALL_LETTER_F) {
|
||||
this.state = TokenizerStates.FALSE1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (n === charset.LATIN_SMALL_LETTER_N) {
|
||||
this.state = TokenizerStates.NULL1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (n === charset.QUOTATION_MARK) {
|
||||
this.bufferedString.reset();
|
||||
this.state = TokenizerStates.STRING_DEFAULT;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (n >= charset.DIGIT_ONE && n <= charset.DIGIT_NINE) {
|
||||
this.bufferedNumber.reset();
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_INITIAL_NON_ZERO;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (n === charset.DIGIT_ZERO) {
|
||||
this.bufferedNumber.reset();
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_INITIAL_ZERO;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (n === charset.HYPHEN_MINUS) {
|
||||
this.bufferedNumber.reset();
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_INITIAL_MINUS;
|
||||
continue;
|
||||
}
|
||||
|
||||
break;
|
||||
// STRING
|
||||
case TokenizerStates.STRING_DEFAULT:
|
||||
if (n === charset.QUOTATION_MARK) {
|
||||
const string = this.bufferedString.toString();
|
||||
this.state = TokenizerStates.START;
|
||||
this.onToken(STRING, string, this.offset);
|
||||
this.offset += this.bufferedString.byteLength + 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (n === charset.REVERSE_SOLIDUS) {
|
||||
this.state = TokenizerStates.STRING_AFTER_BACKSLASH;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (n >= 128) {
|
||||
// Parse multi byte (>=128) chars one at a time
|
||||
if (n >= 194 && n <= 223) {
|
||||
this.bytes_in_sequence = 2;
|
||||
} else if (n <= 239) {
|
||||
this.bytes_in_sequence = 3;
|
||||
} else {
|
||||
this.bytes_in_sequence = 4;
|
||||
}
|
||||
|
||||
if (this.bytes_in_sequence <= buffer.length - i) {
|
||||
// if bytes needed to complete char fall outside buffer length, we have a boundary split
|
||||
this.bufferedString.appendBuf(
|
||||
buffer,
|
||||
i,
|
||||
i + this.bytes_in_sequence
|
||||
);
|
||||
i += this.bytes_in_sequence - 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
this.bytes_remaining = i + this.bytes_in_sequence - buffer.length;
|
||||
this.char_split_buffer.set(buffer.subarray(i));
|
||||
i = buffer.length - 1;
|
||||
this.state = TokenizerStates.STRING_INCOMPLETE_CHAR;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (n >= charset.SPACE) {
|
||||
this.bufferedString.appendChar(n);
|
||||
continue;
|
||||
}
|
||||
|
||||
break;
|
||||
case TokenizerStates.STRING_INCOMPLETE_CHAR:
|
||||
// check for carry over of a multi byte char split between data chunks
|
||||
// & fill temp buffer it with start of this data chunk up to the boundary limit set in the last iteration
|
||||
this.char_split_buffer.set(
|
||||
buffer.subarray(i, i + this.bytes_remaining),
|
||||
this.bytes_in_sequence - this.bytes_remaining
|
||||
);
|
||||
this.bufferedString.appendBuf(
|
||||
this.char_split_buffer,
|
||||
0,
|
||||
this.bytes_in_sequence
|
||||
);
|
||||
i = this.bytes_remaining - 1;
|
||||
this.state = TokenizerStates.STRING_DEFAULT;
|
||||
continue;
|
||||
case TokenizerStates.STRING_AFTER_BACKSLASH:
|
||||
const controlChar = escapedSequences[n];
|
||||
if (controlChar) {
|
||||
this.bufferedString.appendChar(controlChar);
|
||||
this.state = TokenizerStates.STRING_DEFAULT;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (n === charset.LATIN_SMALL_LETTER_U) {
|
||||
this.unicode = "";
|
||||
this.state = TokenizerStates.STRING_UNICODE_DIGIT_1;
|
||||
continue;
|
||||
}
|
||||
|
||||
break;
|
||||
case TokenizerStates.STRING_UNICODE_DIGIT_1:
|
||||
case TokenizerStates.STRING_UNICODE_DIGIT_2:
|
||||
case TokenizerStates.STRING_UNICODE_DIGIT_3:
|
||||
if (
|
||||
(n >= charset.DIGIT_ZERO && n <= charset.DIGIT_NINE) ||
|
||||
(n >= charset.LATIN_CAPITAL_LETTER_A &&
|
||||
n <= charset.LATIN_CAPITAL_LETTER_F) ||
|
||||
(n >= charset.LATIN_SMALL_LETTER_A &&
|
||||
n <= charset.LATIN_SMALL_LETTER_F)
|
||||
) {
|
||||
this.unicode += String.fromCharCode(n);
|
||||
this.state += 1;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.STRING_UNICODE_DIGIT_4:
|
||||
if (
|
||||
(n >= charset.DIGIT_ZERO && n <= charset.DIGIT_NINE) ||
|
||||
(n >= charset.LATIN_CAPITAL_LETTER_A &&
|
||||
n <= charset.LATIN_CAPITAL_LETTER_F) ||
|
||||
(n >= charset.LATIN_SMALL_LETTER_A &&
|
||||
n <= charset.LATIN_SMALL_LETTER_F)
|
||||
) {
|
||||
const intVal = parseInt(this.unicode + String.fromCharCode(n), 16);
|
||||
if (this.highSurrogate === undefined) {
|
||||
if (intVal >= 0xd800 && intVal <= 0xdbff) {
|
||||
//<55296,56319> - highSurrogate
|
||||
this.highSurrogate = intVal;
|
||||
} else {
|
||||
this.bufferedString.appendBuf(
|
||||
this.encoder.encode(String.fromCharCode(intVal))
|
||||
);
|
||||
}
|
||||
} else {
|
||||
if (intVal >= 0xdc00 && intVal <= 0xdfff) {
|
||||
//<56320,57343> - lowSurrogate
|
||||
this.bufferedString.appendBuf(
|
||||
this.encoder.encode(
|
||||
String.fromCharCode(this.highSurrogate, intVal)
|
||||
)
|
||||
);
|
||||
} else {
|
||||
this.bufferedString.appendBuf(
|
||||
this.encoder.encode(String.fromCharCode(this.highSurrogate))
|
||||
);
|
||||
}
|
||||
this.highSurrogate = undefined;
|
||||
}
|
||||
this.state = TokenizerStates.STRING_DEFAULT;
|
||||
continue;
|
||||
}
|
||||
// Number
|
||||
case TokenizerStates.NUMBER_AFTER_INITIAL_MINUS:
|
||||
if (n === charset.DIGIT_ZERO) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_INITIAL_ZERO;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (n >= charset.DIGIT_ONE && n <= charset.DIGIT_NINE) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_INITIAL_NON_ZERO;
|
||||
continue;
|
||||
}
|
||||
|
||||
break;
|
||||
case TokenizerStates.NUMBER_AFTER_INITIAL_ZERO:
|
||||
if (n === charset.FULL_STOP) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_FULL_STOP;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (
|
||||
n === charset.LATIN_SMALL_LETTER_E ||
|
||||
n === charset.LATIN_CAPITAL_LETTER_E
|
||||
) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_E;
|
||||
continue;
|
||||
}
|
||||
|
||||
i -= 1;
|
||||
this.state = TokenizerStates.START;
|
||||
this.emitNumber();
|
||||
continue;
|
||||
case TokenizerStates.NUMBER_AFTER_INITIAL_NON_ZERO:
|
||||
if (n >= charset.DIGIT_ZERO && n <= charset.DIGIT_NINE) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (n === charset.FULL_STOP) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_FULL_STOP;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (
|
||||
n === charset.LATIN_SMALL_LETTER_E ||
|
||||
n === charset.LATIN_CAPITAL_LETTER_E
|
||||
) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_E;
|
||||
continue;
|
||||
}
|
||||
|
||||
i -= 1;
|
||||
this.state = TokenizerStates.START;
|
||||
this.emitNumber();
|
||||
continue;
|
||||
case TokenizerStates.NUMBER_AFTER_FULL_STOP:
|
||||
if (n >= charset.DIGIT_ZERO && n <= charset.DIGIT_NINE) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_DECIMAL;
|
||||
continue;
|
||||
}
|
||||
|
||||
break;
|
||||
case TokenizerStates.NUMBER_AFTER_DECIMAL:
|
||||
if (n >= charset.DIGIT_ZERO && n <= charset.DIGIT_NINE) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (
|
||||
n === charset.LATIN_SMALL_LETTER_E ||
|
||||
n === charset.LATIN_CAPITAL_LETTER_E
|
||||
) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_E;
|
||||
continue;
|
||||
}
|
||||
|
||||
i -= 1;
|
||||
this.state = TokenizerStates.START;
|
||||
this.emitNumber();
|
||||
continue;
|
||||
case TokenizerStates.NUMBER_AFTER_E:
|
||||
if (n === charset.PLUS_SIGN || n === charset.HYPHEN_MINUS) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_E_AND_SIGN;
|
||||
continue;
|
||||
}
|
||||
// Allow cascading
|
||||
case TokenizerStates.NUMBER_AFTER_E_AND_SIGN:
|
||||
if (n >= charset.DIGIT_ZERO && n <= charset.DIGIT_NINE) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_E_AND_DIGIT;
|
||||
continue;
|
||||
}
|
||||
|
||||
break;
|
||||
case TokenizerStates.NUMBER_AFTER_E_AND_DIGIT:
|
||||
if (n >= charset.DIGIT_ZERO && n <= charset.DIGIT_NINE) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
continue;
|
||||
}
|
||||
|
||||
i -= 1;
|
||||
this.state = TokenizerStates.START;
|
||||
this.emitNumber();
|
||||
continue;
|
||||
// TRUE
|
||||
case TokenizerStates.TRUE1:
|
||||
if (n === charset.LATIN_SMALL_LETTER_R) {
|
||||
this.state = TokenizerStates.TRUE2;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.TRUE2:
|
||||
if (n === charset.LATIN_SMALL_LETTER_U) {
|
||||
this.state = TokenizerStates.TRUE3;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.TRUE3:
|
||||
if (n === charset.LATIN_SMALL_LETTER_E) {
|
||||
this.state = TokenizerStates.START;
|
||||
this.onToken(TRUE, true, this.offset);
|
||||
this.offset += 3;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
// FALSE
|
||||
case TokenizerStates.FALSE1:
|
||||
if (n === charset.LATIN_SMALL_LETTER_A) {
|
||||
this.state = TokenizerStates.FALSE2;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.FALSE2:
|
||||
if (n === charset.LATIN_SMALL_LETTER_L) {
|
||||
this.state = TokenizerStates.FALSE3;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.FALSE3:
|
||||
if (n === charset.LATIN_SMALL_LETTER_S) {
|
||||
this.state = TokenizerStates.FALSE4;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.FALSE4:
|
||||
if (n === charset.LATIN_SMALL_LETTER_E) {
|
||||
this.state = TokenizerStates.START;
|
||||
this.onToken(FALSE, false, this.offset);
|
||||
this.offset += 4;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
// NULL
|
||||
case TokenizerStates.NULL1:
|
||||
if (n === charset.LATIN_SMALL_LETTER_U) {
|
||||
this.state = TokenizerStates.NULL2;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.NULL2:
|
||||
if (n === charset.LATIN_SMALL_LETTER_L) {
|
||||
this.state = TokenizerStates.NULL3;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.NULL3:
|
||||
if (n === charset.LATIN_SMALL_LETTER_L) {
|
||||
this.state = TokenizerStates.START;
|
||||
this.onToken(NULL, null, this.offset);
|
||||
this.offset += 3;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.SEPARATOR:
|
||||
this.separatorIndex += 1;
|
||||
if (
|
||||
!this.separatorBytes ||
|
||||
n !== this.separatorBytes[this.separatorIndex]
|
||||
) {
|
||||
break;
|
||||
}
|
||||
if (this.separatorIndex === this.separatorBytes.length - 1) {
|
||||
this.state = TokenizerStates.START;
|
||||
this.onToken(
|
||||
TokenType.SEPARATOR,
|
||||
this.separator as string,
|
||||
this.offset + this.separatorIndex
|
||||
);
|
||||
this.separatorIndex = 0;
|
||||
}
|
||||
continue;
|
||||
case TokenizerStates.ENDED:
|
||||
if (
|
||||
n === charset.SPACE ||
|
||||
n === charset.NEWLINE ||
|
||||
n === charset.CARRIAGE_RETURN ||
|
||||
n === charset.TAB
|
||||
) {
|
||||
// whitespace
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
this.error(
|
||||
new TokenizerError(
|
||||
`Unexpected "${String.fromCharCode(n)}" at position "${i}" in state ${
|
||||
TokenizerStates[this.state]
|
||||
}`
|
||||
)
|
||||
);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
private emitNumber(): void {
|
||||
this.onToken(
|
||||
NUMBER,
|
||||
this.parseNumber(this.bufferedNumber.toString()),
|
||||
this.offset
|
||||
);
|
||||
this.offset += this.bufferedNumber.byteLength - 1;
|
||||
}
|
||||
|
||||
protected parseNumber(numberStr: string): number {
|
||||
return Number(numberStr);
|
||||
}
|
||||
|
||||
public error(err: Error): void {
|
||||
if (this.state !== TokenizerStates.ENDED) {
|
||||
this.state = TokenizerStates.ERROR;
|
||||
}
|
||||
|
||||
this.onError(err);
|
||||
}
|
||||
|
||||
public end(): void {
|
||||
switch (this.state) {
|
||||
case TokenizerStates.NUMBER_AFTER_INITIAL_ZERO:
|
||||
case TokenizerStates.NUMBER_AFTER_INITIAL_NON_ZERO:
|
||||
case TokenizerStates.NUMBER_AFTER_DECIMAL:
|
||||
case TokenizerStates.NUMBER_AFTER_E_AND_DIGIT:
|
||||
this.state = TokenizerStates.ENDED;
|
||||
this.emitNumber();
|
||||
this.onEnd();
|
||||
break;
|
||||
case TokenizerStates.START:
|
||||
case TokenizerStates.ERROR:
|
||||
case TokenizerStates.SEPARATOR:
|
||||
this.state = TokenizerStates.ENDED;
|
||||
this.onEnd();
|
||||
break;
|
||||
default:
|
||||
this.error(
|
||||
new TokenizerError(
|
||||
`Tokenizer ended in the middle of a token (state: ${
|
||||
TokenizerStates[this.state]
|
||||
}). Either not all the data was received or the data was invalid.`
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
public onToken(token: TokenType.LEFT_BRACE, value: "{", offset: number): void;
|
||||
public onToken(
|
||||
token: TokenType.RIGHT_BRACE,
|
||||
value: "}",
|
||||
offset: number
|
||||
): void;
|
||||
public onToken(
|
||||
token: TokenType.LEFT_BRACKET,
|
||||
value: "[",
|
||||
offset: number
|
||||
): void;
|
||||
public onToken(
|
||||
token: TokenType.RIGHT_BRACKET,
|
||||
value: "]",
|
||||
offset: number
|
||||
): void;
|
||||
public onToken(token: TokenType.COLON, value: ":", offset: number): void;
|
||||
public onToken(token: TokenType.COMMA, value: ",", offset: number): void;
|
||||
public onToken(token: TokenType.TRUE, value: true, offset: number): void;
|
||||
public onToken(token: TokenType.FALSE, value: false, offset: number): void;
|
||||
public onToken(token: TokenType.NULL, value: null, offset: number): void;
|
||||
public onToken(token: TokenType.STRING, value: string, offset: number): void;
|
||||
public onToken(token: TokenType.NUMBER, value: number, offset: number): void;
|
||||
public onToken(
|
||||
token: TokenType.SEPARATOR,
|
||||
value: string,
|
||||
offset: number
|
||||
): void;
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
public onToken(token: TokenType, value: JsonPrimitive, offset: number): void {
|
||||
// Override me
|
||||
throw new TokenizerError(
|
||||
'Can\'t emit tokens before the "onToken" callback has been set up.'
|
||||
);
|
||||
}
|
||||
|
||||
public onError(err: Error): void {
|
||||
// Override me
|
||||
throw err;
|
||||
}
|
||||
|
||||
public onEnd(): void {
|
||||
// Override me
|
||||
}
|
||||
}
|
||||
390
node_modules/@streamparser/json/dist/deno/tokenparser.ts
generated
vendored
Normal file
@ -0,0 +1,390 @@
|
||||
import { TokenType } from './utils/constants.ts';
|
||||
import {
|
||||
JsonPrimitive,
|
||||
JsonKey,
|
||||
JsonObject,
|
||||
JsonArray,
|
||||
JsonStruct,
|
||||
} from './utils/types.ts';
|
||||
|
||||
const {
|
||||
LEFT_BRACE,
|
||||
RIGHT_BRACE,
|
||||
LEFT_BRACKET,
|
||||
RIGHT_BRACKET,
|
||||
COLON,
|
||||
COMMA,
|
||||
TRUE,
|
||||
FALSE,
|
||||
NULL,
|
||||
STRING,
|
||||
NUMBER,
|
||||
SEPARATOR,
|
||||
} = TokenType;
|
||||
|
||||
// Parser States
|
||||
enum TokenParserState {
|
||||
VALUE,
|
||||
KEY,
|
||||
COLON,
|
||||
COMMA,
|
||||
ENDED,
|
||||
ERROR,
|
||||
SEPARATOR,
|
||||
}
|
||||
// Parser Modes
|
||||
export enum TokenParserMode {
|
||||
OBJECT,
|
||||
ARRAY,
|
||||
}
|
||||
|
||||
export interface StackElement {
|
||||
key: JsonKey;
|
||||
value: JsonStruct;
|
||||
mode: TokenParserMode | undefined;
|
||||
emit: boolean;
|
||||
}
|
||||
|
||||
export interface TokenParserOptions {
|
||||
paths?: string[];
|
||||
keepStack?: boolean;
|
||||
separator?: string;
|
||||
}
|
||||
|
||||
const defaultOpts: TokenParserOptions = {
|
||||
paths: undefined,
|
||||
keepStack: true,
|
||||
separator: undefined,
|
||||
};
|
||||
|
||||
export class TokenParserError extends Error {
|
||||
constructor(message: string) {
|
||||
super(message);
|
||||
// Typescript is broken. This is a workaround
|
||||
Object.setPrototypeOf(this, TokenParserError.prototype);
|
||||
}
|
||||
}
|
||||
|
||||
export default class TokenParser {
|
||||
private readonly paths?: (string[] | undefined)[];
|
||||
private readonly keepStack: boolean;
|
||||
private readonly separator?: string;
|
||||
private state: TokenParserState = TokenParserState.VALUE;
|
||||
private mode: TokenParserMode | undefined = undefined;
|
||||
private key: JsonKey = undefined;
|
||||
private value: JsonPrimitive | JsonStruct | undefined = undefined;
|
||||
private stack: StackElement[] = [];
|
||||
|
||||
constructor(opts?: TokenParserOptions) {
|
||||
opts = { ...defaultOpts, ...opts };
|
||||
|
||||
if (opts.paths) {
|
||||
this.paths = opts.paths.map((path) => {
|
||||
if (path === undefined || path === "$*") return undefined;
|
||||
|
||||
if (!path.startsWith("$"))
|
||||
throw new TokenParserError(
|
||||
`Invalid selector "${path}". Should start with "$".`
|
||||
);
|
||||
const pathParts = path.split(".").slice(1);
|
||||
if (pathParts.includes(""))
|
||||
throw new TokenParserError(
|
||||
`Invalid selector "${path}". ".." syntax not supported.`
|
||||
);
|
||||
return pathParts;
|
||||
});
|
||||
}
|
||||
|
||||
this.keepStack = opts.keepStack as boolean;
|
||||
this.separator = opts.separator;
|
||||
}
|
||||
|
||||
private shouldEmit(): boolean {
|
||||
if (!this.paths) return true;
|
||||
|
||||
return this.paths.some((path) => {
|
||||
if (path === undefined) return true;
|
||||
if (path.length !== this.stack.length) return false;
|
||||
|
||||
for (let i = 0; i < path.length - 1; i++) {
|
||||
const selector = path[i];
|
||||
const key = this.stack[i + 1].key;
|
||||
if (selector === "*") continue;
|
||||
if (selector !== key) return false;
|
||||
}
|
||||
|
||||
const selector = path[path.length - 1];
|
||||
if (selector === "*") return true;
|
||||
return selector === this.key?.toString();
|
||||
});
|
||||
}
|
||||
|
||||
private push(): void {
|
||||
this.stack.push({
|
||||
key: this.key,
|
||||
value: this.value as JsonStruct,
|
||||
mode: this.mode,
|
||||
emit: this.shouldEmit(),
|
||||
});
|
||||
}
|
||||
|
||||
private pop(): void {
|
||||
const value = this.value;
|
||||
|
||||
let emit;
|
||||
({
|
||||
key: this.key,
|
||||
value: this.value,
|
||||
mode: this.mode,
|
||||
emit,
|
||||
} = this.stack.pop() as StackElement);
|
||||
|
||||
this.state =
|
||||
this.mode !== undefined ? TokenParserState.COMMA : TokenParserState.VALUE;
|
||||
|
||||
this.emit(value as JsonPrimitive | JsonStruct, emit);
|
||||
}
|
||||
|
||||
private emit(value: JsonPrimitive | JsonStruct, emit: boolean): void {
|
||||
if (
|
||||
!this.keepStack &&
|
||||
this.value &&
|
||||
this.stack.every((item) => !item.emit)
|
||||
) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
delete (this.value as JsonStruct as any)[this.key as string | number];
|
||||
}
|
||||
|
||||
if (emit) {
|
||||
this.onValue(
|
||||
value,
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
this.key as JsonKey as any,
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
this.value as JsonStruct | undefined as any,
|
||||
this.stack
|
||||
);
|
||||
}
|
||||
|
||||
if (this.stack.length === 0) {
|
||||
if (this.separator) {
|
||||
this.state = TokenParserState.SEPARATOR;
|
||||
} else if (this.separator === undefined) {
|
||||
this.end();
|
||||
}
|
||||
// else if separator === '', expect next JSON object.
|
||||
}
|
||||
}
|
||||
|
||||
public get isEnded(): boolean {
|
||||
return this.state === TokenParserState.ENDED;
|
||||
}
|
||||
|
||||
public write(token: TokenType.LEFT_BRACE, value: "{"): void;
|
||||
public write(token: TokenType.RIGHT_BRACE, value: "}"): void;
|
||||
public write(token: TokenType.LEFT_BRACKET, value: "["): void;
|
||||
public write(token: TokenType.RIGHT_BRACKET, value: "]"): void;
|
||||
public write(token: TokenType.COLON, value: ":"): void;
|
||||
public write(token: TokenType.COMMA, value: ","): void;
|
||||
public write(token: TokenType.TRUE, value: true): void;
|
||||
public write(token: TokenType.FALSE, value: false): void;
|
||||
public write(token: TokenType.NULL, value: null): void;
|
||||
public write(token: TokenType.STRING, value: string): void;
|
||||
public write(token: TokenType.NUMBER, value: number): void;
|
||||
public write(token: TokenType.SEPARATOR, value: string): void;
|
||||
public write(token: TokenType, value: JsonPrimitive): void {
|
||||
if (this.state === TokenParserState.VALUE) {
|
||||
if (
|
||||
token === STRING ||
|
||||
token === NUMBER ||
|
||||
token === TRUE ||
|
||||
token === FALSE ||
|
||||
token === NULL
|
||||
) {
|
||||
if (this.mode === TokenParserMode.OBJECT) {
|
||||
(this.value as JsonObject)[this.key as string] = value;
|
||||
this.state = TokenParserState.COMMA;
|
||||
} else if (this.mode === TokenParserMode.ARRAY) {
|
||||
(this.value as JsonArray).push(value);
|
||||
this.state = TokenParserState.COMMA;
|
||||
}
|
||||
|
||||
this.emit(value, this.shouldEmit());
|
||||
return;
|
||||
}
|
||||
|
||||
if (token === LEFT_BRACE) {
|
||||
this.push();
|
||||
if (this.mode === TokenParserMode.OBJECT) {
|
||||
this.value = (this.value as JsonObject)[this.key as string] = {};
|
||||
} else if (this.mode === TokenParserMode.ARRAY) {
|
||||
const val = {};
|
||||
(this.value as JsonArray).push(val);
|
||||
this.value = val;
|
||||
} else {
|
||||
this.value = {};
|
||||
}
|
||||
this.mode = TokenParserMode.OBJECT;
|
||||
this.state = TokenParserState.KEY;
|
||||
this.key = undefined;
|
||||
return;
|
||||
}
|
||||
|
||||
if (token === LEFT_BRACKET) {
|
||||
this.push();
|
||||
if (this.mode === TokenParserMode.OBJECT) {
|
||||
this.value = (this.value as JsonObject)[this.key as string] = [];
|
||||
} else if (this.mode === TokenParserMode.ARRAY) {
|
||||
const val: JsonArray = [];
|
||||
(this.value as JsonArray).push(val);
|
||||
this.value = val;
|
||||
} else {
|
||||
this.value = [];
|
||||
}
|
||||
this.mode = TokenParserMode.ARRAY;
|
||||
this.state = TokenParserState.VALUE;
|
||||
this.key = 0;
|
||||
return;
|
||||
}
|
||||
|
||||
if (
|
||||
this.mode === TokenParserMode.ARRAY &&
|
||||
token === RIGHT_BRACKET &&
|
||||
(this.value as JsonArray).length === 0
|
||||
) {
|
||||
this.pop();
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (this.state === TokenParserState.KEY) {
|
||||
if (token === STRING) {
|
||||
this.key = value as string;
|
||||
this.state = TokenParserState.COLON;
|
||||
return;
|
||||
}
|
||||
|
||||
if (
|
||||
token === RIGHT_BRACE &&
|
||||
Object.keys(this.value as JsonObject).length === 0
|
||||
) {
|
||||
this.pop();
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (this.state === TokenParserState.COLON) {
|
||||
if (token === COLON) {
|
||||
this.state = TokenParserState.VALUE;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (this.state === TokenParserState.COMMA) {
|
||||
if (token === COMMA) {
|
||||
if (this.mode === TokenParserMode.ARRAY) {
|
||||
this.state = TokenParserState.VALUE;
|
||||
(this.key as number) += 1;
|
||||
return;
|
||||
}
|
||||
|
||||
/* istanbul ignore else */
|
||||
if (this.mode === TokenParserMode.OBJECT) {
|
||||
this.state = TokenParserState.KEY;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
(token === RIGHT_BRACE && this.mode === TokenParserMode.OBJECT) ||
|
||||
(token === RIGHT_BRACKET && this.mode === TokenParserMode.ARRAY)
|
||||
) {
|
||||
this.pop();
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (this.state === TokenParserState.SEPARATOR) {
|
||||
if (token === SEPARATOR && value === this.separator) {
|
||||
this.state = TokenParserState.VALUE;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
this.error(
|
||||
new TokenParserError(
|
||||
`Unexpected ${TokenType[token]} (${JSON.stringify(value)}) in state ${
|
||||
TokenParserState[this.state]
|
||||
}`
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public error(err: Error): void {
|
||||
if (this.state !== TokenParserState.ENDED) {
|
||||
this.state = TokenParserState.ERROR;
|
||||
}
|
||||
|
||||
this.onError(err);
|
||||
}
|
||||
|
||||
public end(): void {
|
||||
if (
|
||||
(this.state !== TokenParserState.VALUE &&
|
||||
this.state !== TokenParserState.SEPARATOR) ||
|
||||
this.stack.length > 0
|
||||
) {
|
||||
this.error(
|
||||
new Error(
|
||||
`Parser ended in mid-parsing (state: ${
|
||||
TokenParserState[this.state]
|
||||
}). Either not all the data was received or the data was invalid.`
|
||||
)
|
||||
);
|
||||
} else {
|
||||
this.state = TokenParserState.ENDED;
|
||||
this.onEnd();
|
||||
}
|
||||
}
|
||||
|
||||
public onValue(
|
||||
value: JsonPrimitive | JsonStruct,
|
||||
key: number,
|
||||
parent: JsonArray,
|
||||
stack: StackElement[]
|
||||
): void;
|
||||
public onValue(
|
||||
value: JsonPrimitive | JsonStruct,
|
||||
key: string,
|
||||
parent: JsonObject,
|
||||
stack: StackElement[]
|
||||
): void;
|
||||
public onValue(
|
||||
value: JsonPrimitive | JsonStruct,
|
||||
key: undefined,
|
||||
parent: undefined,
|
||||
stack: []
|
||||
): void;
|
||||
public onValue(
|
||||
/* eslint-disable @typescript-eslint/no-unused-vars */
|
||||
value: JsonPrimitive | JsonStruct,
|
||||
key: JsonKey | undefined,
|
||||
parent: JsonStruct | undefined,
|
||||
stack: StackElement[]
|
||||
/* eslint-enable @typescript-eslint/no-unused-vars */
|
||||
): void {
|
||||
// Override me
|
||||
throw new TokenParserError(
|
||||
'Can\'t emit data before the "onValue" callback has been set up.'
|
||||
);
|
||||
}
|
||||
|
||||
public onError(err: Error): void {
|
||||
// Override me
|
||||
throw err;
|
||||
}
|
||||
|
||||
public onEnd(): void {
|
||||
// Override me
|
||||
}
|
||||
}
|
||||
75
node_modules/@streamparser/json/dist/deno/utils/bufferedString.ts
generated
vendored
Normal file
@ -0,0 +1,75 @@
|
||||
export interface StringBuilder {
|
||||
byteLength: number;
|
||||
appendChar: (char: number) => void;
|
||||
appendBuf: (buf: Uint8Array, start?: number, end?: number) => void;
|
||||
reset: () => void;
|
||||
toString: () => string;
|
||||
}
|
||||
|
||||
export class NonBufferedString implements StringBuilder {
|
||||
private decoder = new TextDecoder("utf-8");
|
||||
private string = "";
|
||||
public byteLength = 0;
|
||||
|
||||
public appendChar(char: number): void {
|
||||
this.string += String.fromCharCode(char);
|
||||
this.byteLength += 1;
|
||||
}
|
||||
|
||||
public appendBuf(buf: Uint8Array, start = 0, end: number = buf.length): void {
|
||||
this.string += this.decoder.decode(buf.subarray(start, end));
|
||||
this.byteLength += end - start;
|
||||
}
|
||||
|
||||
public reset(): void {
|
||||
this.string = "";
|
||||
this.byteLength = 0;
|
||||
}
|
||||
|
||||
public toString(): string {
|
||||
return this.string;
|
||||
}
|
||||
}
|
||||
|
||||
export class BufferedString implements StringBuilder {
|
||||
private decoder = new TextDecoder("utf-8");
|
||||
private buffer: Uint8Array;
|
||||
private bufferOffset = 0;
|
||||
private string = "";
|
||||
public byteLength = 0;
|
||||
|
||||
public constructor(bufferSize: number) {
|
||||
this.buffer = new Uint8Array(bufferSize);
|
||||
}
|
||||
|
||||
public appendChar(char: number): void {
|
||||
if (this.bufferOffset >= this.buffer.length) this.flushStringBuffer();
|
||||
this.buffer[this.bufferOffset++] = char;
|
||||
this.byteLength += 1;
|
||||
}
|
||||
|
||||
public appendBuf(buf: Uint8Array, start = 0, end: number = buf.length): void {
|
||||
const size = end - start;
|
||||
if (this.bufferOffset + size > this.buffer.length) this.flushStringBuffer();
|
||||
this.buffer.set(buf.subarray(start, end), this.bufferOffset);
|
||||
this.bufferOffset += size;
|
||||
this.byteLength += size;
|
||||
}
|
||||
|
||||
private flushStringBuffer(): void {
|
||||
this.string += this.decoder.decode(
|
||||
this.buffer.subarray(0, this.bufferOffset)
|
||||
);
|
||||
this.bufferOffset = 0;
|
||||
}
|
||||
|
||||
public reset(): void {
|
||||
this.string = "";
|
||||
this.bufferOffset = 0;
|
||||
this.byteLength = 0;
|
||||
}
|
||||
public toString(): string {
|
||||
this.flushStringBuffer();
|
||||
return this.string;
|
||||
}
|
||||
}
|
||||
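The two StringBuilder implementations above trade memory for decode calls: NonBufferedString decodes every appended chunk immediately, while BufferedString collects bytes in a fixed Uint8Array and only runs TextDecoder when the buffer fills or toString() is called. A small sketch of that behaviour; the relative import path is an assumption, since these classes are not re-exported from the package index:

import { BufferedString, NonBufferedString } from "./bufferedString"; // assumed local path

const bytes = new TextEncoder().encode("hello");

// Buffered: bytes sit in a 16-byte Uint8Array until toString() flushes them.
const buffered = new BufferedString(16);
buffered.appendBuf(bytes);
buffered.appendChar(0x21); // "!"
console.log(buffered.byteLength); // 6
console.log(buffered.toString()); // "hello!"

// Non-buffered: every append is decoded and concatenated right away.
const plain = new NonBufferedString();
plain.appendBuf(bytes);
console.log(plain.toString()); // "hello"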
14  node_modules/@streamparser/json/dist/deno/utils/constants.ts  generated  vendored  Normal file
@@ -0,0 +1,14 @@
|
||||
export enum TokenType {
|
||||
LEFT_BRACE = 0x1,
|
||||
RIGHT_BRACE = 0x2,
|
||||
LEFT_BRACKET = 0x3,
|
||||
RIGHT_BRACKET = 0x4,
|
||||
COLON = 0x5,
|
||||
COMMA = 0x6,
|
||||
TRUE = 0x7,
|
||||
FALSE = 0x8,
|
||||
NULL = 0x9,
|
||||
STRING = 0xa,
|
||||
NUMBER = 0xb,
|
||||
SEPARATOR = 0xc,
|
||||
}
|
||||
5  node_modules/@streamparser/json/dist/deno/utils/types.ts  generated  vendored  Normal file
@@ -0,0 +1,5 @@
|
||||
export type JsonPrimitive = string | number | boolean | null;
|
||||
export type JsonKey = string | number | undefined;
|
||||
export type JsonObject = { [key: string]: JsonPrimitive | JsonStruct };
|
||||
export type JsonArray = (JsonPrimitive | JsonStruct)[];
|
||||
export type JsonStruct = JsonObject | JsonArray;
|
||||
113  node_modules/@streamparser/json/dist/deno/utils/utf-8.ts  generated  vendored  Normal file
@@ -0,0 +1,113 @@
|
||||
export enum charset {
|
||||
BACKSPACE = 0x8, // "\b"
|
||||
FORM_FEED = 0xc, // "\f"
|
||||
NEWLINE = 0xa, // "\n"
|
||||
CARRIAGE_RETURN = 0xd, // "\r"
|
||||
TAB = 0x9, // "\t"
|
||||
SPACE = 0x20, //
|
||||
EXCLAMATION_MARK = 0x21, // !
|
||||
QUOTATION_MARK = 0x22, // "
|
||||
NUMBER_SIGN = 0x23, // #
|
||||
DOLLAR_SIGN = 0x24, // $
|
||||
PERCENT_SIGN = 0x25, // %
|
||||
AMPERSAND = 0x26, // &
|
||||
APOSTROPHE = 0x27, // '
|
||||
LEFT_PARENTHESIS = 0x28, // (
|
||||
RIGHT_PARENTHESIS = 0x29, // )
|
||||
ASTERISK = 0x2a, // *
|
||||
PLUS_SIGN = 0x2b, // +
|
||||
COMMA = 0x2c, // ,
|
||||
HYPHEN_MINUS = 0x2d, // -
|
||||
FULL_STOP = 0x2e, // .
|
||||
SOLIDUS = 0x2f, // /
|
||||
DIGIT_ZERO = 0x30, // 0
|
||||
DIGIT_ONE = 0x31, // 1
|
||||
DIGIT_TWO = 0x32, // 2
|
||||
DIGIT_THREE = 0x33, // 3
|
||||
DIGIT_FOUR = 0x34, // 4
|
||||
DIGIT_FIVE = 0x35, // 5
|
||||
DIGIT_SIX = 0x36, // 6
|
||||
DIGIT_SEVEN = 0x37, // 7
|
||||
DIGIT_EIGHT = 0x38, // 8
|
||||
DIGIT_NINE = 0x39, // 9
|
||||
COLON = 0x3a, // :
|
||||
SEMICOLON = 0x3b, // ;
|
||||
LESS_THAN_SIGN = 0x3c, // <
|
||||
EQUALS_SIGN = 0x3d, // =
|
||||
GREATER_THAN_SIGN = 0x3e, // >
|
||||
QUESTION_MARK = 0x3f, // ?
|
||||
COMMERCIAL_AT = 0x40, // @
|
||||
LATIN_CAPITAL_LETTER_A = 0x41, // A
|
||||
LATIN_CAPITAL_LETTER_B = 0x42, // B
|
||||
LATIN_CAPITAL_LETTER_C = 0x43, // C
|
||||
LATIN_CAPITAL_LETTER_D = 0x44, // D
|
||||
LATIN_CAPITAL_LETTER_E = 0x45, // E
|
||||
LATIN_CAPITAL_LETTER_F = 0x46, // F
|
||||
LATIN_CAPITAL_LETTER_G = 0x47, // G
|
||||
LATIN_CAPITAL_LETTER_H = 0x48, // H
|
||||
LATIN_CAPITAL_LETTER_I = 0x49, // I
|
||||
LATIN_CAPITAL_LETTER_J = 0x4a, // J
|
||||
LATIN_CAPITAL_LETTER_K = 0x4b, // K
|
||||
LATIN_CAPITAL_LETTER_L = 0x4c, // L
|
||||
LATIN_CAPITAL_LETTER_M = 0x4d, // M
|
||||
LATIN_CAPITAL_LETTER_N = 0x4e, // N
|
||||
LATIN_CAPITAL_LETTER_O = 0x4f, // O
|
||||
LATIN_CAPITAL_LETTER_P = 0x50, // P
|
||||
LATIN_CAPITAL_LETTER_Q = 0x51, // Q
|
||||
LATIN_CAPITAL_LETTER_R = 0x52, // R
|
||||
LATIN_CAPITAL_LETTER_S = 0x53, // S
|
||||
LATIN_CAPITAL_LETTER_T = 0x54, // T
|
||||
LATIN_CAPITAL_LETTER_U = 0x55, // U
|
||||
LATIN_CAPITAL_LETTER_V = 0x56, // V
|
||||
LATIN_CAPITAL_LETTER_W = 0x57, // W
|
||||
LATIN_CAPITAL_LETTER_X = 0x58, // X
|
||||
LATIN_CAPITAL_LETTER_Y = 0x59, // Y
|
||||
LATIN_CAPITAL_LETTER_Z = 0x5a, // Z
|
||||
LEFT_SQUARE_BRACKET = 0x5b, // [
|
||||
REVERSE_SOLIDUS = 0x5c, // \
|
||||
RIGHT_SQUARE_BRACKET = 0x5d, // ]
|
||||
CIRCUMFLEX_ACCENT = 0x5e, // ^
|
||||
LOW_LINE = 0x5f, // _
|
||||
GRAVE_ACCENT = 0x60, // `
|
||||
LATIN_SMALL_LETTER_A = 0x61, // a
|
||||
LATIN_SMALL_LETTER_B = 0x62, // b
|
||||
LATIN_SMALL_LETTER_C = 0x63, // c
|
||||
LATIN_SMALL_LETTER_D = 0x64, // d
|
||||
LATIN_SMALL_LETTER_E = 0x65, // e
|
||||
LATIN_SMALL_LETTER_F = 0x66, // f
|
||||
LATIN_SMALL_LETTER_G = 0x67, // g
|
||||
LATIN_SMALL_LETTER_H = 0x68, // h
|
||||
LATIN_SMALL_LETTER_I = 0x69, // i
|
||||
LATIN_SMALL_LETTER_J = 0x6a, // j
|
||||
LATIN_SMALL_LETTER_K = 0x6b, // k
|
||||
LATIN_SMALL_LETTER_L = 0x6c, // l
|
||||
LATIN_SMALL_LETTER_M = 0x6d, // m
|
||||
LATIN_SMALL_LETTER_N = 0x6e, // n
|
||||
LATIN_SMALL_LETTER_O = 0x6f, // o
|
||||
LATIN_SMALL_LETTER_P = 0x70, // p
|
||||
LATIN_SMALL_LETTER_Q = 0x71, // q
|
||||
LATIN_SMALL_LETTER_R = 0x72, // r
|
||||
LATIN_SMALL_LETTER_S = 0x73, // s
|
||||
LATIN_SMALL_LETTER_T = 0x74, // t
|
||||
LATIN_SMALL_LETTER_U = 0x75, // u
|
||||
LATIN_SMALL_LETTER_V = 0x76, // v
|
||||
LATIN_SMALL_LETTER_W = 0x77, // w
|
||||
LATIN_SMALL_LETTER_X = 0x78, // x
|
||||
LATIN_SMALL_LETTER_Y = 0x79, // y
|
||||
LATIN_SMALL_LETTER_Z = 0x7a, // z
|
||||
LEFT_CURLY_BRACKET = 0x7b, // {
|
||||
VERTICAL_LINE = 0x7c, // |
|
||||
RIGHT_CURLY_BRACKET = 0x7d, // }
|
||||
TILDE = 0x7e, // ~
|
||||
}
|
||||
|
||||
export const escapedSequences: { [key: number]: number } = {
|
||||
[charset.QUOTATION_MARK]: charset.QUOTATION_MARK,
|
||||
[charset.REVERSE_SOLIDUS]: charset.REVERSE_SOLIDUS,
|
||||
[charset.SOLIDUS]: charset.SOLIDUS,
|
||||
[charset.LATIN_SMALL_LETTER_B]: charset.BACKSPACE,
|
||||
[charset.LATIN_SMALL_LETTER_F]: charset.FORM_FEED,
|
||||
[charset.LATIN_SMALL_LETTER_N]: charset.NEWLINE,
|
||||
[charset.LATIN_SMALL_LETTER_R]: charset.CARRIAGE_RETURN,
|
||||
[charset.LATIN_SMALL_LETTER_T]: charset.TAB,
|
||||
};
|
||||
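The escapedSequences table above is what the tokenizer consults for the byte that follows a backslash inside a JSON string; bytes with no entry (such as "u") fall through to the unicode-escape handling instead. A tiny illustration using the utf8 namespace that the package index re-exports:

import { utf8 } from "@streamparser/json";

const { charset, escapedSequences } = utf8;

// The two bytes 0x5c 0x6e ("\" then "n") decode to a real newline.
const mapped = escapedSequences[charset.LATIN_SMALL_LETTER_N];
console.log(mapped === charset.NEWLINE); // true

// "u" has no entry here, so \uXXXX escapes take the separate unicode path.
console.log(escapedSequences[charset.LATIN_SMALL_LETTER_U]); // undefined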
5  node_modules/@streamparser/json/dist/mjs/index.d.ts  generated  vendored  Normal file
@@ -0,0 +1,5 @@
|
||||
export { default as Tokenizer } from "./tokenizer";
|
||||
export { default as TokenParser } from "./tokenparser";
|
||||
export { default as JSONParser } from "./jsonparser";
|
||||
export * as utf8 from "./utils/utf-8";
|
||||
export { TokenType } from "./utils/constants";
|
||||
18  node_modules/@streamparser/json/dist/mjs/jsonparser.d.ts  generated  vendored  Normal file
@@ -0,0 +1,18 @@
|
||||
import { TokenizerOptions } from "./tokenizer";
|
||||
import { StackElement, TokenParserOptions } from "./tokenparser";
|
||||
import { JsonPrimitive, JsonKey, JsonStruct } from "./utils/types";
|
||||
interface JSONParserOpts extends TokenizerOptions, TokenParserOptions {
|
||||
}
|
||||
export default class JSONParser {
|
||||
private tokenizer;
|
||||
private tokenParser;
|
||||
constructor(opts?: JSONParserOpts);
|
||||
get isEnded(): boolean;
|
||||
write(input: Iterable<number> | string): void;
|
||||
end(): void;
|
||||
set onToken(cb: (token: number, value: JsonPrimitive, offset: number) => void);
|
||||
set onValue(cb: (value: JsonPrimitive | JsonStruct, key: JsonKey | undefined, parent: JsonStruct | undefined, stack: StackElement[]) => void);
|
||||
set onError(cb: (err: Error) => void);
|
||||
set onEnd(cb: () => void);
|
||||
}
|
||||
export {};
|
||||
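The declaration above is the entire public surface of JSONParser: feed chunks with write(), receive completed values through the onValue setter, and call end() when the stream is finished. A minimal usage sketch, assuming the package is imported by its published name:

import { JSONParser } from "@streamparser/json";

const parser = new JSONParser();

// onValue fires for each value as soon as it is complete; the root arrives with an empty stack.
parser.onValue = (value, key, parent, stack) => {
  if (stack.length === 0) console.log("root:", value);
};
parser.onError = (err) => console.error("parse failed:", err.message);
parser.onEnd = () => console.log("done");

// Chunks may split tokens anywhere; the tokenizer reassembles them.
parser.write('{"name": "exa');
parser.write('mple", "count": 1}');
parser.end();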
45  node_modules/@streamparser/json/dist/mjs/tokenizer.d.ts  generated  vendored  Normal file
@@ -0,0 +1,45 @@
|
||||
import { TokenType } from "./utils/constants";
|
||||
export interface TokenizerOptions {
|
||||
stringBufferSize?: number;
|
||||
numberBufferSize?: number;
|
||||
separator?: string;
|
||||
}
|
||||
export declare class TokenizerError extends Error {
|
||||
constructor(message: string);
|
||||
}
|
||||
export default class Tokenizer {
|
||||
private state;
|
||||
private separator?;
|
||||
private separatorBytes?;
|
||||
private separatorIndex;
|
||||
private bufferedString;
|
||||
private bufferedNumber;
|
||||
private unicode;
|
||||
private highSurrogate;
|
||||
private bytes_remaining;
|
||||
private bytes_in_sequence;
|
||||
private char_split_buffer;
|
||||
private encoder;
|
||||
private offset;
|
||||
constructor(opts?: TokenizerOptions);
|
||||
get isEnded(): boolean;
|
||||
write(input: Iterable<number> | string): void;
|
||||
private emitNumber;
|
||||
protected parseNumber(numberStr: string): number;
|
||||
error(err: Error): void;
|
||||
end(): void;
|
||||
onToken(token: TokenType.LEFT_BRACE, value: "{", offset: number): void;
|
||||
onToken(token: TokenType.RIGHT_BRACE, value: "}", offset: number): void;
|
||||
onToken(token: TokenType.LEFT_BRACKET, value: "[", offset: number): void;
|
||||
onToken(token: TokenType.RIGHT_BRACKET, value: "]", offset: number): void;
|
||||
onToken(token: TokenType.COLON, value: ":", offset: number): void;
|
||||
onToken(token: TokenType.COMMA, value: ",", offset: number): void;
|
||||
onToken(token: TokenType.TRUE, value: true, offset: number): void;
|
||||
onToken(token: TokenType.FALSE, value: false, offset: number): void;
|
||||
onToken(token: TokenType.NULL, value: null, offset: number): void;
|
||||
onToken(token: TokenType.STRING, value: string, offset: number): void;
|
||||
onToken(token: TokenType.NUMBER, value: number, offset: number): void;
|
||||
onToken(token: TokenType.SEPARATOR, value: string, offset: number): void;
|
||||
onError(err: Error): void;
|
||||
onEnd(): void;
|
||||
}
|
||||
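Tokenizer on its own is useful for inspecting the raw token stream: each overload of onToken above pairs a TokenType with the value and the byte offset it was read at, and the callback can simply be replaced on an instance. A short sketch:

import { Tokenizer, TokenType } from "@streamparser/json";

const tokenizer = new Tokenizer();

// Log every token with its byte offset; TokenType is the numeric enum from utils/constants.
tokenizer.onToken = (token: TokenType, value: unknown, offset: number) => {
  console.log(TokenType[token], value, "at byte", offset);
};

tokenizer.write('{"a": [true, 1.5]}');
tokenizer.end();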
55  node_modules/@streamparser/json/dist/mjs/tokenparser.d.ts  generated  vendored  Normal file
@@ -0,0 +1,55 @@
|
||||
import { TokenType } from "./utils/constants";
|
||||
import { JsonPrimitive, JsonKey, JsonObject, JsonArray, JsonStruct } from "./utils/types";
|
||||
export declare enum TokenParserMode {
|
||||
OBJECT = 0,
|
||||
ARRAY = 1
|
||||
}
|
||||
export interface StackElement {
|
||||
key: JsonKey;
|
||||
value: JsonStruct;
|
||||
mode: TokenParserMode | undefined;
|
||||
emit: boolean;
|
||||
}
|
||||
export interface TokenParserOptions {
|
||||
paths?: string[];
|
||||
keepStack?: boolean;
|
||||
separator?: string;
|
||||
}
|
||||
export declare class TokenParserError extends Error {
|
||||
constructor(message: string);
|
||||
}
|
||||
export default class TokenParser {
|
||||
private readonly paths?;
|
||||
private readonly keepStack;
|
||||
private readonly separator?;
|
||||
private state;
|
||||
private mode;
|
||||
private key;
|
||||
private value;
|
||||
private stack;
|
||||
constructor(opts?: TokenParserOptions);
|
||||
private shouldEmit;
|
||||
private push;
|
||||
private pop;
|
||||
private emit;
|
||||
get isEnded(): boolean;
|
||||
write(token: TokenType.LEFT_BRACE, value: "{"): void;
|
||||
write(token: TokenType.RIGHT_BRACE, value: "}"): void;
|
||||
write(token: TokenType.LEFT_BRACKET, value: "["): void;
|
||||
write(token: TokenType.RIGHT_BRACKET, value: "]"): void;
|
||||
write(token: TokenType.COLON, value: ":"): void;
|
||||
write(token: TokenType.COMMA, value: ","): void;
|
||||
write(token: TokenType.TRUE, value: true): void;
|
||||
write(token: TokenType.FALSE, value: false): void;
|
||||
write(token: TokenType.NULL, value: null): void;
|
||||
write(token: TokenType.STRING, value: string): void;
|
||||
write(token: TokenType.NUMBER, value: number): void;
|
||||
write(token: TokenType.SEPARATOR, value: string): void;
|
||||
error(err: Error): void;
|
||||
end(): void;
|
||||
onValue(value: JsonPrimitive | JsonStruct, key: number, parent: JsonArray, stack: StackElement[]): void;
|
||||
onValue(value: JsonPrimitive | JsonStruct, key: string, parent: JsonObject, stack: StackElement[]): void;
|
||||
onValue(value: JsonPrimitive | JsonStruct, key: undefined, parent: undefined, stack: []): void;
|
||||
onError(err: Error): void;
|
||||
onEnd(): void;
|
||||
}
|
||||
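TokenParser is meant to sit behind a Tokenizer: the tokenizer's onToken feeds the parser's write, which is exactly how the JSONParser class in src/jsonparser.ts later in this diff wires the two together. A hand-rolled version of that wiring, for illustration:

import { Tokenizer, TokenParser } from "@streamparser/json";

const tokenizer = new Tokenizer();
const tokenParser = new TokenParser();

// Same hookup as JSONParser: every emitted token goes straight into the token parser.
tokenizer.onToken = tokenParser.write.bind(tokenParser);

tokenParser.onValue = (
  value: unknown,
  key: string | number | undefined,
  parent: unknown,
  stack: unknown[]
) => {
  if (stack.length === 0) console.log("document:", value); // [1, 2, 3]
};

tokenizer.write("[1, 2, 3]");
tokenizer.end();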
29  node_modules/@streamparser/json/dist/mjs/utils/bufferedString.d.ts  generated  vendored  Normal file
@@ -0,0 +1,29 @@
|
||||
export interface StringBuilder {
|
||||
byteLength: number;
|
||||
appendChar: (char: number) => void;
|
||||
appendBuf: (buf: Uint8Array, start?: number, end?: number) => void;
|
||||
reset: () => void;
|
||||
toString: () => string;
|
||||
}
|
||||
export declare class NonBufferedString implements StringBuilder {
|
||||
private decoder;
|
||||
private string;
|
||||
byteLength: number;
|
||||
appendChar(char: number): void;
|
||||
appendBuf(buf: Uint8Array, start?: number, end?: number): void;
|
||||
reset(): void;
|
||||
toString(): string;
|
||||
}
|
||||
export declare class BufferedString implements StringBuilder {
|
||||
private decoder;
|
||||
private buffer;
|
||||
private bufferOffset;
|
||||
private string;
|
||||
byteLength: number;
|
||||
constructor(bufferSize: number);
|
||||
appendChar(char: number): void;
|
||||
appendBuf(buf: Uint8Array, start?: number, end?: number): void;
|
||||
private flushStringBuffer;
|
||||
reset(): void;
|
||||
toString(): string;
|
||||
}
|
||||
14  node_modules/@streamparser/json/dist/mjs/utils/constants.d.ts  generated  vendored  Normal file
@@ -0,0 +1,14 @@
|
||||
export declare enum TokenType {
|
||||
LEFT_BRACE = 1,
|
||||
RIGHT_BRACE = 2,
|
||||
LEFT_BRACKET = 3,
|
||||
RIGHT_BRACKET = 4,
|
||||
COLON = 5,
|
||||
COMMA = 6,
|
||||
TRUE = 7,
|
||||
FALSE = 8,
|
||||
NULL = 9,
|
||||
STRING = 10,
|
||||
NUMBER = 11,
|
||||
SEPARATOR = 12
|
||||
}
|
||||
7  node_modules/@streamparser/json/dist/mjs/utils/types.d.ts  generated  vendored  Normal file
@@ -0,0 +1,7 @@
|
||||
export declare type JsonPrimitive = string | number | boolean | null;
|
||||
export declare type JsonKey = string | number | undefined;
|
||||
export declare type JsonObject = {
|
||||
[key: string]: JsonPrimitive | JsonStruct;
|
||||
};
|
||||
export declare type JsonArray = (JsonPrimitive | JsonStruct)[];
|
||||
export declare type JsonStruct = JsonObject | JsonArray;
|
||||
105  node_modules/@streamparser/json/dist/mjs/utils/utf-8.d.ts  generated  vendored  Normal file
@@ -0,0 +1,105 @@
|
||||
export declare enum charset {
|
||||
BACKSPACE = 8,
|
||||
FORM_FEED = 12,
|
||||
NEWLINE = 10,
|
||||
CARRIAGE_RETURN = 13,
|
||||
TAB = 9,
|
||||
SPACE = 32,
|
||||
EXCLAMATION_MARK = 33,
|
||||
QUOTATION_MARK = 34,
|
||||
NUMBER_SIGN = 35,
|
||||
DOLLAR_SIGN = 36,
|
||||
PERCENT_SIGN = 37,
|
||||
AMPERSAND = 38,
|
||||
APOSTROPHE = 39,
|
||||
LEFT_PARENTHESIS = 40,
|
||||
RIGHT_PARENTHESIS = 41,
|
||||
ASTERISK = 42,
|
||||
PLUS_SIGN = 43,
|
||||
COMMA = 44,
|
||||
HYPHEN_MINUS = 45,
|
||||
FULL_STOP = 46,
|
||||
SOLIDUS = 47,
|
||||
DIGIT_ZERO = 48,
|
||||
DIGIT_ONE = 49,
|
||||
DIGIT_TWO = 50,
|
||||
DIGIT_THREE = 51,
|
||||
DIGIT_FOUR = 52,
|
||||
DIGIT_FIVE = 53,
|
||||
DIGIT_SIX = 54,
|
||||
DIGIT_SEVEN = 55,
|
||||
DIGIT_EIGHT = 56,
|
||||
DIGIT_NINE = 57,
|
||||
COLON = 58,
|
||||
SEMICOLON = 59,
|
||||
LESS_THAN_SIGN = 60,
|
||||
EQUALS_SIGN = 61,
|
||||
GREATER_THAN_SIGN = 62,
|
||||
QUESTION_MARK = 63,
|
||||
COMMERCIAL_AT = 64,
|
||||
LATIN_CAPITAL_LETTER_A = 65,
|
||||
LATIN_CAPITAL_LETTER_B = 66,
|
||||
LATIN_CAPITAL_LETTER_C = 67,
|
||||
LATIN_CAPITAL_LETTER_D = 68,
|
||||
LATIN_CAPITAL_LETTER_E = 69,
|
||||
LATIN_CAPITAL_LETTER_F = 70,
|
||||
LATIN_CAPITAL_LETTER_G = 71,
|
||||
LATIN_CAPITAL_LETTER_H = 72,
|
||||
LATIN_CAPITAL_LETTER_I = 73,
|
||||
LATIN_CAPITAL_LETTER_J = 74,
|
||||
LATIN_CAPITAL_LETTER_K = 75,
|
||||
LATIN_CAPITAL_LETTER_L = 76,
|
||||
LATIN_CAPITAL_LETTER_M = 77,
|
||||
LATIN_CAPITAL_LETTER_N = 78,
|
||||
LATIN_CAPITAL_LETTER_O = 79,
|
||||
LATIN_CAPITAL_LETTER_P = 80,
|
||||
LATIN_CAPITAL_LETTER_Q = 81,
|
||||
LATIN_CAPITAL_LETTER_R = 82,
|
||||
LATIN_CAPITAL_LETTER_S = 83,
|
||||
LATIN_CAPITAL_LETTER_T = 84,
|
||||
LATIN_CAPITAL_LETTER_U = 85,
|
||||
LATIN_CAPITAL_LETTER_V = 86,
|
||||
LATIN_CAPITAL_LETTER_W = 87,
|
||||
LATIN_CAPITAL_LETTER_X = 88,
|
||||
LATIN_CAPITAL_LETTER_Y = 89,
|
||||
LATIN_CAPITAL_LETTER_Z = 90,
|
||||
LEFT_SQUARE_BRACKET = 91,
|
||||
REVERSE_SOLIDUS = 92,
|
||||
RIGHT_SQUARE_BRACKET = 93,
|
||||
CIRCUMFLEX_ACCENT = 94,
|
||||
LOW_LINE = 95,
|
||||
GRAVE_ACCENT = 96,
|
||||
LATIN_SMALL_LETTER_A = 97,
|
||||
LATIN_SMALL_LETTER_B = 98,
|
||||
LATIN_SMALL_LETTER_C = 99,
|
||||
LATIN_SMALL_LETTER_D = 100,
|
||||
LATIN_SMALL_LETTER_E = 101,
|
||||
LATIN_SMALL_LETTER_F = 102,
|
||||
LATIN_SMALL_LETTER_G = 103,
|
||||
LATIN_SMALL_LETTER_H = 104,
|
||||
LATIN_SMALL_LETTER_I = 105,
|
||||
LATIN_SMALL_LETTER_J = 106,
|
||||
LATIN_SMALL_LETTER_K = 107,
|
||||
LATIN_SMALL_LETTER_L = 108,
|
||||
LATIN_SMALL_LETTER_M = 109,
|
||||
LATIN_SMALL_LETTER_N = 110,
|
||||
LATIN_SMALL_LETTER_O = 111,
|
||||
LATIN_SMALL_LETTER_P = 112,
|
||||
LATIN_SMALL_LETTER_Q = 113,
|
||||
LATIN_SMALL_LETTER_R = 114,
|
||||
LATIN_SMALL_LETTER_S = 115,
|
||||
LATIN_SMALL_LETTER_T = 116,
|
||||
LATIN_SMALL_LETTER_U = 117,
|
||||
LATIN_SMALL_LETTER_V = 118,
|
||||
LATIN_SMALL_LETTER_W = 119,
|
||||
LATIN_SMALL_LETTER_X = 120,
|
||||
LATIN_SMALL_LETTER_Y = 121,
|
||||
LATIN_SMALL_LETTER_Z = 122,
|
||||
LEFT_CURLY_BRACKET = 123,
|
||||
VERTICAL_LINE = 124,
|
||||
RIGHT_CURLY_BRACKET = 125,
|
||||
TILDE = 126
|
||||
}
|
||||
export declare const escapedSequences: {
|
||||
[key: number]: number;
|
||||
};
|
||||
5  node_modules/@streamparser/json/dist/umd/index.d.ts  generated  vendored  Normal file
@@ -0,0 +1,5 @@
|
||||
export { default as Tokenizer } from "./tokenizer";
|
||||
export { default as TokenParser } from "./tokenparser";
|
||||
export { default as JSONParser } from "./jsonparser";
|
||||
export * as utf8 from "./utils/utf-8";
|
||||
export { TokenType } from "./utils/constants";
|
||||
1111  node_modules/@streamparser/json/dist/umd/index.js  generated  vendored  Normal file
File diff suppressed because it is too large
18  node_modules/@streamparser/json/dist/umd/jsonparser.d.ts  generated  vendored  Normal file
@@ -0,0 +1,18 @@
|
||||
import { TokenizerOptions } from "./tokenizer";
|
||||
import { StackElement, TokenParserOptions } from "./tokenparser";
|
||||
import { JsonPrimitive, JsonKey, JsonStruct } from "./utils/types";
|
||||
interface JSONParserOpts extends TokenizerOptions, TokenParserOptions {
|
||||
}
|
||||
export default class JSONParser {
|
||||
private tokenizer;
|
||||
private tokenParser;
|
||||
constructor(opts?: JSONParserOpts);
|
||||
get isEnded(): boolean;
|
||||
write(input: Iterable<number> | string): void;
|
||||
end(): void;
|
||||
set onToken(cb: (token: number, value: JsonPrimitive, offset: number) => void);
|
||||
set onValue(cb: (value: JsonPrimitive | JsonStruct, key: JsonKey | undefined, parent: JsonStruct | undefined, stack: StackElement[]) => void);
|
||||
set onError(cb: (err: Error) => void);
|
||||
set onEnd(cb: () => void);
|
||||
}
|
||||
export {};
|
||||
45  node_modules/@streamparser/json/dist/umd/tokenizer.d.ts  generated  vendored  Normal file
@@ -0,0 +1,45 @@
|
||||
import { TokenType } from "./utils/constants";
|
||||
export interface TokenizerOptions {
|
||||
stringBufferSize?: number;
|
||||
numberBufferSize?: number;
|
||||
separator?: string;
|
||||
}
|
||||
export declare class TokenizerError extends Error {
|
||||
constructor(message: string);
|
||||
}
|
||||
export default class Tokenizer {
|
||||
private state;
|
||||
private separator?;
|
||||
private separatorBytes?;
|
||||
private separatorIndex;
|
||||
private bufferedString;
|
||||
private bufferedNumber;
|
||||
private unicode;
|
||||
private highSurrogate;
|
||||
private bytes_remaining;
|
||||
private bytes_in_sequence;
|
||||
private char_split_buffer;
|
||||
private encoder;
|
||||
private offset;
|
||||
constructor(opts?: TokenizerOptions);
|
||||
get isEnded(): boolean;
|
||||
write(input: Iterable<number> | string): void;
|
||||
private emitNumber;
|
||||
protected parseNumber(numberStr: string): number;
|
||||
error(err: Error): void;
|
||||
end(): void;
|
||||
onToken(token: TokenType.LEFT_BRACE, value: "{", offset: number): void;
|
||||
onToken(token: TokenType.RIGHT_BRACE, value: "}", offset: number): void;
|
||||
onToken(token: TokenType.LEFT_BRACKET, value: "[", offset: number): void;
|
||||
onToken(token: TokenType.RIGHT_BRACKET, value: "]", offset: number): void;
|
||||
onToken(token: TokenType.COLON, value: ":", offset: number): void;
|
||||
onToken(token: TokenType.COMMA, value: ",", offset: number): void;
|
||||
onToken(token: TokenType.TRUE, value: true, offset: number): void;
|
||||
onToken(token: TokenType.FALSE, value: false, offset: number): void;
|
||||
onToken(token: TokenType.NULL, value: null, offset: number): void;
|
||||
onToken(token: TokenType.STRING, value: string, offset: number): void;
|
||||
onToken(token: TokenType.NUMBER, value: number, offset: number): void;
|
||||
onToken(token: TokenType.SEPARATOR, value: string, offset: number): void;
|
||||
onError(err: Error): void;
|
||||
onEnd(): void;
|
||||
}
|
||||
55  node_modules/@streamparser/json/dist/umd/tokenparser.d.ts  generated  vendored  Normal file
@@ -0,0 +1,55 @@
|
||||
import { TokenType } from "./utils/constants";
|
||||
import { JsonPrimitive, JsonKey, JsonObject, JsonArray, JsonStruct } from "./utils/types";
|
||||
export declare enum TokenParserMode {
|
||||
OBJECT = 0,
|
||||
ARRAY = 1
|
||||
}
|
||||
export interface StackElement {
|
||||
key: JsonKey;
|
||||
value: JsonStruct;
|
||||
mode: TokenParserMode | undefined;
|
||||
emit: boolean;
|
||||
}
|
||||
export interface TokenParserOptions {
|
||||
paths?: string[];
|
||||
keepStack?: boolean;
|
||||
separator?: string;
|
||||
}
|
||||
export declare class TokenParserError extends Error {
|
||||
constructor(message: string);
|
||||
}
|
||||
export default class TokenParser {
|
||||
private readonly paths?;
|
||||
private readonly keepStack;
|
||||
private readonly separator?;
|
||||
private state;
|
||||
private mode;
|
||||
private key;
|
||||
private value;
|
||||
private stack;
|
||||
constructor(opts?: TokenParserOptions);
|
||||
private shouldEmit;
|
||||
private push;
|
||||
private pop;
|
||||
private emit;
|
||||
get isEnded(): boolean;
|
||||
write(token: TokenType.LEFT_BRACE, value: "{"): void;
|
||||
write(token: TokenType.RIGHT_BRACE, value: "}"): void;
|
||||
write(token: TokenType.LEFT_BRACKET, value: "["): void;
|
||||
write(token: TokenType.RIGHT_BRACKET, value: "]"): void;
|
||||
write(token: TokenType.COLON, value: ":"): void;
|
||||
write(token: TokenType.COMMA, value: ","): void;
|
||||
write(token: TokenType.TRUE, value: true): void;
|
||||
write(token: TokenType.FALSE, value: false): void;
|
||||
write(token: TokenType.NULL, value: null): void;
|
||||
write(token: TokenType.STRING, value: string): void;
|
||||
write(token: TokenType.NUMBER, value: number): void;
|
||||
write(token: TokenType.SEPARATOR, value: string): void;
|
||||
error(err: Error): void;
|
||||
end(): void;
|
||||
onValue(value: JsonPrimitive | JsonStruct, key: number, parent: JsonArray, stack: StackElement[]): void;
|
||||
onValue(value: JsonPrimitive | JsonStruct, key: string, parent: JsonObject, stack: StackElement[]): void;
|
||||
onValue(value: JsonPrimitive | JsonStruct, key: undefined, parent: undefined, stack: []): void;
|
||||
onError(err: Error): void;
|
||||
onEnd(): void;
|
||||
}
|
||||
29  node_modules/@streamparser/json/dist/umd/utils/bufferedString.d.ts  generated  vendored  Normal file
@@ -0,0 +1,29 @@
|
||||
export interface StringBuilder {
|
||||
byteLength: number;
|
||||
appendChar: (char: number) => void;
|
||||
appendBuf: (buf: Uint8Array, start?: number, end?: number) => void;
|
||||
reset: () => void;
|
||||
toString: () => string;
|
||||
}
|
||||
export declare class NonBufferedString implements StringBuilder {
|
||||
private decoder;
|
||||
private string;
|
||||
byteLength: number;
|
||||
appendChar(char: number): void;
|
||||
appendBuf(buf: Uint8Array, start?: number, end?: number): void;
|
||||
reset(): void;
|
||||
toString(): string;
|
||||
}
|
||||
export declare class BufferedString implements StringBuilder {
|
||||
private decoder;
|
||||
private buffer;
|
||||
private bufferOffset;
|
||||
private string;
|
||||
byteLength: number;
|
||||
constructor(bufferSize: number);
|
||||
appendChar(char: number): void;
|
||||
appendBuf(buf: Uint8Array, start?: number, end?: number): void;
|
||||
private flushStringBuffer;
|
||||
reset(): void;
|
||||
toString(): string;
|
||||
}
|
||||
14  node_modules/@streamparser/json/dist/umd/utils/constants.d.ts  generated  vendored  Normal file
@@ -0,0 +1,14 @@
|
||||
export declare enum TokenType {
|
||||
LEFT_BRACE = 1,
|
||||
RIGHT_BRACE = 2,
|
||||
LEFT_BRACKET = 3,
|
||||
RIGHT_BRACKET = 4,
|
||||
COLON = 5,
|
||||
COMMA = 6,
|
||||
TRUE = 7,
|
||||
FALSE = 8,
|
||||
NULL = 9,
|
||||
STRING = 10,
|
||||
NUMBER = 11,
|
||||
SEPARATOR = 12
|
||||
}
|
||||
7  node_modules/@streamparser/json/dist/umd/utils/types.d.ts  generated  vendored  Normal file
@@ -0,0 +1,7 @@
|
||||
export declare type JsonPrimitive = string | number | boolean | null;
|
||||
export declare type JsonKey = string | number | undefined;
|
||||
export declare type JsonObject = {
|
||||
[key: string]: JsonPrimitive | JsonStruct;
|
||||
};
|
||||
export declare type JsonArray = (JsonPrimitive | JsonStruct)[];
|
||||
export declare type JsonStruct = JsonObject | JsonArray;
|
||||
105  node_modules/@streamparser/json/dist/umd/utils/utf-8.d.ts  generated  vendored  Normal file
@@ -0,0 +1,105 @@
|
||||
export declare enum charset {
|
||||
BACKSPACE = 8,
|
||||
FORM_FEED = 12,
|
||||
NEWLINE = 10,
|
||||
CARRIAGE_RETURN = 13,
|
||||
TAB = 9,
|
||||
SPACE = 32,
|
||||
EXCLAMATION_MARK = 33,
|
||||
QUOTATION_MARK = 34,
|
||||
NUMBER_SIGN = 35,
|
||||
DOLLAR_SIGN = 36,
|
||||
PERCENT_SIGN = 37,
|
||||
AMPERSAND = 38,
|
||||
APOSTROPHE = 39,
|
||||
LEFT_PARENTHESIS = 40,
|
||||
RIGHT_PARENTHESIS = 41,
|
||||
ASTERISK = 42,
|
||||
PLUS_SIGN = 43,
|
||||
COMMA = 44,
|
||||
HYPHEN_MINUS = 45,
|
||||
FULL_STOP = 46,
|
||||
SOLIDUS = 47,
|
||||
DIGIT_ZERO = 48,
|
||||
DIGIT_ONE = 49,
|
||||
DIGIT_TWO = 50,
|
||||
DIGIT_THREE = 51,
|
||||
DIGIT_FOUR = 52,
|
||||
DIGIT_FIVE = 53,
|
||||
DIGIT_SIX = 54,
|
||||
DIGIT_SEVEN = 55,
|
||||
DIGIT_EIGHT = 56,
|
||||
DIGIT_NINE = 57,
|
||||
COLON = 58,
|
||||
SEMICOLON = 59,
|
||||
LESS_THAN_SIGN = 60,
|
||||
EQUALS_SIGN = 61,
|
||||
GREATER_THAN_SIGN = 62,
|
||||
QUESTION_MARK = 63,
|
||||
COMMERCIAL_AT = 64,
|
||||
LATIN_CAPITAL_LETTER_A = 65,
|
||||
LATIN_CAPITAL_LETTER_B = 66,
|
||||
LATIN_CAPITAL_LETTER_C = 67,
|
||||
LATIN_CAPITAL_LETTER_D = 68,
|
||||
LATIN_CAPITAL_LETTER_E = 69,
|
||||
LATIN_CAPITAL_LETTER_F = 70,
|
||||
LATIN_CAPITAL_LETTER_G = 71,
|
||||
LATIN_CAPITAL_LETTER_H = 72,
|
||||
LATIN_CAPITAL_LETTER_I = 73,
|
||||
LATIN_CAPITAL_LETTER_J = 74,
|
||||
LATIN_CAPITAL_LETTER_K = 75,
|
||||
LATIN_CAPITAL_LETTER_L = 76,
|
||||
LATIN_CAPITAL_LETTER_M = 77,
|
||||
LATIN_CAPITAL_LETTER_N = 78,
|
||||
LATIN_CAPITAL_LETTER_O = 79,
|
||||
LATIN_CAPITAL_LETTER_P = 80,
|
||||
LATIN_CAPITAL_LETTER_Q = 81,
|
||||
LATIN_CAPITAL_LETTER_R = 82,
|
||||
LATIN_CAPITAL_LETTER_S = 83,
|
||||
LATIN_CAPITAL_LETTER_T = 84,
|
||||
LATIN_CAPITAL_LETTER_U = 85,
|
||||
LATIN_CAPITAL_LETTER_V = 86,
|
||||
LATIN_CAPITAL_LETTER_W = 87,
|
||||
LATIN_CAPITAL_LETTER_X = 88,
|
||||
LATIN_CAPITAL_LETTER_Y = 89,
|
||||
LATIN_CAPITAL_LETTER_Z = 90,
|
||||
LEFT_SQUARE_BRACKET = 91,
|
||||
REVERSE_SOLIDUS = 92,
|
||||
RIGHT_SQUARE_BRACKET = 93,
|
||||
CIRCUMFLEX_ACCENT = 94,
|
||||
LOW_LINE = 95,
|
||||
GRAVE_ACCENT = 96,
|
||||
LATIN_SMALL_LETTER_A = 97,
|
||||
LATIN_SMALL_LETTER_B = 98,
|
||||
LATIN_SMALL_LETTER_C = 99,
|
||||
LATIN_SMALL_LETTER_D = 100,
|
||||
LATIN_SMALL_LETTER_E = 101,
|
||||
LATIN_SMALL_LETTER_F = 102,
|
||||
LATIN_SMALL_LETTER_G = 103,
|
||||
LATIN_SMALL_LETTER_H = 104,
|
||||
LATIN_SMALL_LETTER_I = 105,
|
||||
LATIN_SMALL_LETTER_J = 106,
|
||||
LATIN_SMALL_LETTER_K = 107,
|
||||
LATIN_SMALL_LETTER_L = 108,
|
||||
LATIN_SMALL_LETTER_M = 109,
|
||||
LATIN_SMALL_LETTER_N = 110,
|
||||
LATIN_SMALL_LETTER_O = 111,
|
||||
LATIN_SMALL_LETTER_P = 112,
|
||||
LATIN_SMALL_LETTER_Q = 113,
|
||||
LATIN_SMALL_LETTER_R = 114,
|
||||
LATIN_SMALL_LETTER_S = 115,
|
||||
LATIN_SMALL_LETTER_T = 116,
|
||||
LATIN_SMALL_LETTER_U = 117,
|
||||
LATIN_SMALL_LETTER_V = 118,
|
||||
LATIN_SMALL_LETTER_W = 119,
|
||||
LATIN_SMALL_LETTER_X = 120,
|
||||
LATIN_SMALL_LETTER_Y = 121,
|
||||
LATIN_SMALL_LETTER_Z = 122,
|
||||
LEFT_CURLY_BRACKET = 123,
|
||||
VERTICAL_LINE = 124,
|
||||
RIGHT_CURLY_BRACKET = 125,
|
||||
TILDE = 126
|
||||
}
|
||||
export declare const escapedSequences: {
|
||||
[key: number]: number;
|
||||
};
|
||||
45  node_modules/@streamparser/json/package.json  generated  vendored  Normal file
@@ -0,0 +1,45 @@
|
||||
{
|
||||
"name": "@streamparser/json",
|
||||
"description": "Streaming JSON parser in Javascript for Node.js, Deno and the browser",
|
||||
"version": "0.0.6",
|
||||
"main": "./dist/cjs/index.js",
|
||||
"module": "./dist/mjs/index.js",
|
||||
"browser": "./dist/umd/index.js",
|
||||
"types": "./dist/index.d.ts",
|
||||
"author": "Juanjo Diaz <juanjo.diazmo@gmail.com>",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/juanjoDiaz/streamparser-json.git"
|
||||
},
|
||||
"bugs": "https://github.com/juanjoDiaz/streamparser-json/issues",
|
||||
"devDependencies": {
|
||||
"@types/node": "^17.0.4",
|
||||
"@typescript-eslint/eslint-plugin": "^5.8.0",
|
||||
"@typescript-eslint/parser": "^5.8.0",
|
||||
"eslint": "^8.5.0",
|
||||
"eslint-config-prettier": "^8.3.0",
|
||||
"eslint-plugin-prettier": "^4.0.0",
|
||||
"prettier": "^2.1.2",
|
||||
"rollup": "^2.33.1",
|
||||
"rollup-plugin-typescript2": "^0.31.1",
|
||||
"tap": "^14.10.8",
|
||||
"typescript": "^4.0.5"
|
||||
},
|
||||
"scripts": {
|
||||
"lint": "eslint '{src,test,performance,samplejson}/**/*.{js,ts,json}'",
|
||||
"format": "eslint '{src,test,performance,samplejson}/**/*.{js,ts,json}' --fix",
|
||||
"build:deno": "node build.deno.js ./src ./dist/deno",
|
||||
"build:umd": "rollup -c",
|
||||
"build:cjs": "tsc --module commonjs --outDir ./dist/cjs",
|
||||
"build:mjs": "tsc --module esnext --outDir ./dist/mjs && node build.mjs.js ./dist/mjs",
|
||||
"build": "npm run build:umd && npm run build:cjs && npm run build:mjs && npm run build:deno",
|
||||
"prepare": "npm run build",
|
||||
"test": "TS_NODE_SKIP_PROJECT=true tap --ts --timeout=60 test"
|
||||
},
|
||||
"license": "MIT",
|
||||
"tags": [
|
||||
"json",
|
||||
"stream"
|
||||
],
|
||||
"dependencies": {}
|
||||
}
|
||||
61  node_modules/@streamparser/json/performance/index.deno.ts  generated  vendored  Normal file
@@ -0,0 +1,61 @@
|
||||
import { readFileStrSync } from "https://deno.land/std/fs/mod.ts";
|
||||
import JSONParse2 from "../dist/deno/jsonparser.ts";
|
||||
|
||||
function repeat(str: string, number: number): string {
|
||||
return Array(number).fill(str).join("");
|
||||
}
|
||||
|
||||
console.log("====");
|
||||
console.log("True");
|
||||
console.log("====");
|
||||
benchmark(repeat("true", 1000));
|
||||
|
||||
console.log("=====");
|
||||
console.log("False");
|
||||
console.log("=====");
|
||||
benchmark(repeat("false", 1000));
|
||||
|
||||
console.log("======");
|
||||
console.log("String");
|
||||
console.log("======");
|
||||
benchmark(repeat('"This is a not-very-long text string."', 1000));
|
||||
|
||||
console.log("==============");
|
||||
console.log("Complex object");
|
||||
console.log("==============");
|
||||
benchmark(readFileStrSync("../samplejson/basic.json"));
|
||||
|
||||
console.log("==============================");
|
||||
console.log("Complex object with no numbers");
|
||||
console.log("==============================");
|
||||
benchmark(readFileStrSync("../samplejson/basic-no-numbers.json"));
|
||||
|
||||
console.log("=======================");
|
||||
console.log("Object with many spaces");
|
||||
console.log("=======================");
|
||||
const spaces = Array(1000).fill(" ").join("");
|
||||
benchmark(
|
||||
repeat(
|
||||
`${spaces}{${spaces}"test"${spaces}:${spaces}"asdfasdf"${spaces}}`,
|
||||
1000
|
||||
)
|
||||
);
|
||||
|
||||
console.log("===========");
|
||||
console.log("Long string");
|
||||
console.log("===========");
|
||||
benchmark(`"${Array(100000).fill("a").join("")}"`);
|
||||
|
||||
console.log("===========");
|
||||
console.log("Long number");
|
||||
console.log("===========");
|
||||
benchmark(`${Array(100000).fill("9").join("")}`);
|
||||
|
||||
function benchmark(jsonStr: string): void {
|
||||
const jsonparse2 = new JSONParse2();
|
||||
|
||||
const start = performance.now();
|
||||
jsonparse2.write(jsonStr);
|
||||
const end = performance.now();
|
||||
console.log(`Time: ${end - start} ms.`);
|
||||
}
|
||||
62  node_modules/@streamparser/json/performance/index.node.mjs  generated  vendored  Normal file
@@ -0,0 +1,62 @@
|
||||
import { performance } from "perf_hooks";
|
||||
import { readFileSync } from "fs";
|
||||
import JSONParser from "../dist/mjs/jsonparse.mjs";
|
||||
|
||||
function repeat(str, number) {
|
||||
return Array(number).fill(str).join("");
|
||||
}
|
||||
|
||||
console.log("====");
|
||||
console.log("True");
|
||||
console.log("====");
|
||||
benchmark(repeat("true", 1000));
|
||||
|
||||
console.log("=====");
|
||||
console.log("False");
|
||||
console.log("=====");
|
||||
benchmark(repeat("false", 1000));
|
||||
|
||||
console.log("======");
|
||||
console.log("String");
|
||||
console.log("======");
|
||||
benchmark(repeat('"This is a not-very-long text string."', 1000));
|
||||
|
||||
console.log("==============");
|
||||
console.log("Complex object");
|
||||
console.log("==============");
|
||||
benchmark(readFileSync("../samplejson/basic.json").toString());
|
||||
|
||||
console.log("==============================");
|
||||
console.log("Complex object with no numbers");
|
||||
console.log("==============================");
|
||||
benchmark(readFileSync("../samplejson/basic-no-numbers.json").toString());
|
||||
|
||||
console.log("=======================");
|
||||
console.log("Object with many spaces");
|
||||
console.log("=======================");
|
||||
const spaces = Array(1000).fill(" ").join("");
|
||||
benchmark(
|
||||
repeat(
|
||||
`${spaces}{${spaces}"test"${spaces}:${spaces}"asdfasdf"${spaces}}`,
|
||||
1000,
|
||||
),
|
||||
);
|
||||
|
||||
console.log("===========");
|
||||
console.log("Long string");
|
||||
console.log("===========");
|
||||
benchmark(`"${Array(100000).fill("a").join("")}"`);
|
||||
|
||||
console.log("===========");
|
||||
console.log("Long number");
|
||||
console.log("===========");
|
||||
benchmark(`${Array(100000).fill("9").join("")}`);
|
||||
|
||||
function benchmark(jsonStr) {
|
||||
const jsonparser = new JSONParser();
|
||||
|
||||
const start = performance.now();
|
||||
jsonparser.write(jsonStr);
|
||||
const end = performance.now();
|
||||
console.log(`Time: ${end - start} ms.`);
|
||||
}
|
||||
32  node_modules/@streamparser/json/performance/package-lock.json  generated  vendored  Normal file
@@ -0,0 +1,32 @@
|
||||
{
|
||||
"name": "@streamparser/json-performance",
|
||||
"version": "1.0.0",
|
||||
"lockfileVersion": 1,
|
||||
"requires": true,
|
||||
"dependencies": {
|
||||
"benchmark": {
|
||||
"version": "2.1.4",
|
||||
"resolved": "https://registry.npmjs.org/benchmark/-/benchmark-2.1.4.tgz",
|
||||
"integrity": "sha1-CfPeMckWQl1JjMLuVloOvzwqVik=",
|
||||
"requires": {
|
||||
"lodash": "^4.17.4",
|
||||
"platform": "^1.3.3"
|
||||
}
|
||||
},
|
||||
"jsonparse": {
|
||||
"version": "1.3.1",
|
||||
"resolved": "https://registry.npmjs.org/jsonparse/-/jsonparse-1.3.1.tgz",
|
||||
"integrity": "sha1-P02uSpH6wxX3EGL4UhzCOfE2YoA="
|
||||
},
|
||||
"lodash": {
|
||||
"version": "4.17.19",
|
||||
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.19.tgz",
|
||||
"integrity": "sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ=="
|
||||
},
|
||||
"platform": {
|
||||
"version": "1.3.5",
|
||||
"resolved": "https://registry.npmjs.org/platform/-/platform-1.3.5.tgz",
|
||||
"integrity": "sha512-TuvHS8AOIZNAlE77WUDiR4rySV/VMptyMfcfeoMgs4P8apaZM3JrnbzBiixKUv+XR6i+BXrQh8WAnjaSPFO65Q=="
|
||||
}
|
||||
}
|
||||
}
|
||||
17  node_modules/@streamparser/json/performance/package.json  generated  vendored  Normal file
@@ -0,0 +1,17 @@
|
||||
{
|
||||
"name": "@streamparser/json-performance",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"deno": "deno run --unstable --allow-read index.deno.ts",
|
||||
"node": "node index.node.mjs ",
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"author": "",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"benchmark": "^2.1.4",
|
||||
"jsonparse": "^1.3.1"
|
||||
}
|
||||
}
|
||||
22  node_modules/@streamparser/json/rollup.config.js  generated  vendored  Normal file
@@ -0,0 +1,22 @@
|
||||
import typescript from "rollup-plugin-typescript2";
|
||||
import pkg from "./package.json";
|
||||
|
||||
export default [
|
||||
{
|
||||
input: "src/index.ts",
|
||||
output: {
|
||||
file: pkg.browser,
|
||||
format: "umd",
|
||||
name: "jsonparse",
|
||||
},
|
||||
plugins: [
|
||||
typescript({
|
||||
tsconfigOverride: {
|
||||
compilerOptions: {
|
||||
target: "es5",
|
||||
},
|
||||
},
|
||||
}),
|
||||
],
|
||||
},
|
||||
];
|
||||
353  node_modules/@streamparser/json/samplejson/basic-no-numbers.json  generated  vendored  Normal file
@@ -0,0 +1,353 @@
|
||||
[
|
||||
{},
|
||||
{
|
||||
"image": [
|
||||
{
|
||||
"shape": "rect",
|
||||
"fill": "#333",
|
||||
"stroke": "#999",
|
||||
"x": "0.5e+1",
|
||||
"y": "0.5",
|
||||
"z": "0.8e-0",
|
||||
"w": "0.5e5",
|
||||
"u": "2E10",
|
||||
"foo": "2E+1",
|
||||
"bar": "2E-0",
|
||||
"width": "47",
|
||||
"height": "47"
|
||||
}
|
||||
],
|
||||
"jumpable": "3",
|
||||
"solid": {
|
||||
"1": ["2", "4"],
|
||||
"2": [],
|
||||
"3": ["2", "6"],
|
||||
"4": [],
|
||||
"5": ["2", "8", "1", "3", "7", "9", "4", "6"],
|
||||
"6": [],
|
||||
"7": ["4", "8"],
|
||||
"8": [],
|
||||
"9": ["6", "8"]
|
||||
},
|
||||
"corners": { "1": true, "3": true, "7": true, "9": true }
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{
|
||||
"shape": "polygon",
|
||||
"fill": "#248",
|
||||
"stroke": "#48f",
|
||||
"points": [
|
||||
["0.5", "47.5"],
|
||||
["47.5", "47.5"],
|
||||
["47.5", "0.5"]
|
||||
]
|
||||
}
|
||||
],
|
||||
"solid": {
|
||||
"1": ["2", "4"],
|
||||
"2": ["1"],
|
||||
"3": ["2"],
|
||||
"4": [],
|
||||
"5": ["2", "8", "1", "3", "7", "9", "4", "6"],
|
||||
"6": [],
|
||||
"7": ["4", "8"],
|
||||
"8": [],
|
||||
"9": ["6", "8"]
|
||||
},
|
||||
"corners": { "1": true, "3": true, "7": false, "9": true }
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{
|
||||
"shape": "polygon",
|
||||
"fill": "#248",
|
||||
"stroke": "#48f",
|
||||
"points": [
|
||||
["0.5", "0.5"],
|
||||
["47.5", "47.5"],
|
||||
["0.5", "47.5"]
|
||||
]
|
||||
}
|
||||
],
|
||||
"solid": {
|
||||
"1": ["2"],
|
||||
"2": ["3"],
|
||||
"3": ["2", "6"],
|
||||
"4": [],
|
||||
"5": ["2", "8", "1", "3", "7", "9", "4", "6"],
|
||||
"6": [],
|
||||
"7": ["4", "8"],
|
||||
"8": [],
|
||||
"9": ["6", "8"]
|
||||
},
|
||||
"corners": { "1": true, "3": true, "7": true, "9": false }
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{
|
||||
"shape": "polygon",
|
||||
"fill": "#333",
|
||||
"stroke": "#999",
|
||||
"points": [
|
||||
["0.5", "0.5"],
|
||||
["47.5", "47.5"],
|
||||
["47.5", "0.5"]
|
||||
]
|
||||
}
|
||||
],
|
||||
"jumpable": "3",
|
||||
"solid": {
|
||||
"1": ["2", "4"],
|
||||
"2": [],
|
||||
"3": ["2", "6"],
|
||||
"4": [],
|
||||
"5": ["2", "8", "1", "3", "7", "9", "4", "6"],
|
||||
"6": [],
|
||||
"7": ["4", "8"],
|
||||
"8": [],
|
||||
"9": ["6", "8"]
|
||||
},
|
||||
"corners": { "1": false, "3": true, "7": true, "9": true }
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{
|
||||
"shape": "polygon",
|
||||
"fill": "#333",
|
||||
"stroke": "#999",
|
||||
"points": [
|
||||
["0.5", "0.5"],
|
||||
["0.5", "47.5"],
|
||||
["47.5", "0.5"]
|
||||
]
|
||||
}
|
||||
],
|
||||
"jumpable": "3",
|
||||
"solid": {
|
||||
"1": ["2", "4"],
|
||||
"2": [],
|
||||
"3": ["2", "6"],
|
||||
"4": [],
|
||||
"5": ["2", "8", "1", "3", "7", "9", "4", "6"],
|
||||
"6": [],
|
||||
"7": ["4", "8"],
|
||||
"8": [],
|
||||
"9": ["6", "8"]
|
||||
},
|
||||
"corners": { "1": true, "3": false, "7": true, "9": true }
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{
|
||||
"shape": "polygon",
|
||||
"fill": "#482",
|
||||
"stroke": "#8f4",
|
||||
"points": [
|
||||
["0.5", "47.5"],
|
||||
["0.5", "23.5"],
|
||||
["24.5", "23.5"],
|
||||
["24.5", "0.5"],
|
||||
["47.5", "0.5"],
|
||||
["47.5", "47.5"]
|
||||
]
|
||||
}
|
||||
],
|
||||
"jumpable": "3",
|
||||
"solid": {
|
||||
"1": ["2", "4"],
|
||||
"2": [],
|
||||
"3": ["6", "2"],
|
||||
"4": [],
|
||||
"5": ["2", "8", "1", "3", "7", "9", "4", "6"],
|
||||
"6": [],
|
||||
"7": ["4", "8"],
|
||||
"8": [],
|
||||
"9": ["6", "8"]
|
||||
},
|
||||
"corners": { "1": true, "3": true, "7": false, "9": true }
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{
|
||||
"shape": "polygon",
|
||||
"fill": "#482",
|
||||
"stroke": "#8f4",
|
||||
"points": [
|
||||
["0.5", "0.5"],
|
||||
["23.5", "0.5"],
|
||||
["23.5", "24.5"],
|
||||
["47.5", "24.5"],
|
||||
["47.5", "47.5"],
|
||||
["0.5", "47.5"]
|
||||
]
|
||||
}
|
||||
],
|
||||
"jumpable": "3",
|
||||
"solid": {
|
||||
"1": ["4", "2"],
|
||||
"2": [],
|
||||
"3": ["2", "6"],
|
||||
"4": [],
|
||||
"5": ["2", "8", "1", "3", "7", "9", "4", "6"],
|
||||
"6": [],
|
||||
"7": ["4", "8"],
|
||||
"8": [],
|
||||
"9": ["6", "8"]
|
||||
},
|
||||
"corners": { "1": true, "3": true, "7": true, "9": false }
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{
|
||||
"shape": "circle",
|
||||
"fill": "#ff0",
|
||||
"stroke": "#ff8",
|
||||
"cx": "24",
|
||||
"cy": "24",
|
||||
"r": "18"
|
||||
}
|
||||
],
|
||||
"item": true
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{
|
||||
"shape": "polygon",
|
||||
"fill": "#842",
|
||||
"stroke": "#f84",
|
||||
"points": [
|
||||
["4.5", "0.5"],
|
||||
["14.5", "0.5"],
|
||||
["14.5", "17.5"],
|
||||
["34", "17.5"],
|
||||
["33.5", "0.5"],
|
||||
["43.5", "0.5"],
|
||||
["43.5", "47.5"],
|
||||
["33.5", "47.5"],
|
||||
["33.5", "30.5"],
|
||||
["14.5", "30.5"],
|
||||
["14.5", "47.5"],
|
||||
["4.5", "47.5"]
|
||||
]
|
||||
}
|
||||
],
|
||||
"jumpable": "3"
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{
|
||||
"shape": "polygon",
|
||||
"fill": "#333",
|
||||
"stroke": "#999",
|
||||
"points": [
|
||||
["0.5", "0.5"],
|
||||
["47.5", "0.5"],
|
||||
["24", "47.5"]
|
||||
]
|
||||
}
|
||||
],
|
||||
"jumpable": "3",
|
||||
"solid": {
|
||||
"1": ["2", "4"],
|
||||
"2": [],
|
||||
"3": ["2", "6"],
|
||||
"4": ["1"],
|
||||
"5": ["2", "8", "1", "3", "7", "9", "4", "6"],
|
||||
"6": ["3"],
|
||||
"7": ["4", "8"],
|
||||
"8": [],
|
||||
"9": ["6", "8"]
|
||||
},
|
||||
"corners": { "1": false, "3": false, "7": true, "9": true }
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{
|
||||
"shape": "rect",
|
||||
"fill": "#114acb",
|
||||
"x": "0.5",
|
||||
"y": "0.5",
|
||||
"width": "47",
|
||||
"height": "47"
|
||||
},
|
||||
{
|
||||
"shape": "polygon",
|
||||
"fill": "rgba(255,255,255,0.30)",
|
||||
"points": [
|
||||
["0.5", "0.5"],
|
||||
["47.5", "0.5"],
|
||||
["40", "8"],
|
||||
["8", "8"],
|
||||
["8", "40"],
|
||||
["0.5", "47.5"]
|
||||
]
|
||||
},
|
||||
{
|
||||
"shape": "polygon",
|
||||
"fill": "rgba(0,0,0,0.30)",
|
||||
"points": [
|
||||
["47.5", "0.5"],
|
||||
["48", "48"],
|
||||
["0.5", "47.5"],
|
||||
["8", "40"],
|
||||
["40", "40"],
|
||||
["40", "8"]
|
||||
]
|
||||
},
|
||||
{
|
||||
"shape": "polygon",
|
||||
"fill": "rgb(255,255,0)",
|
||||
"stroke": "rgba(255,255,0,0.5)",
|
||||
"points": [
|
||||
["24", "9"],
|
||||
["35", "20"],
|
||||
["26", "29"],
|
||||
["26", "33"],
|
||||
["22", "33"],
|
||||
["22", "27"],
|
||||
["29", "20"],
|
||||
["24", "15"],
|
||||
["16", "23"],
|
||||
["13", "20"]
|
||||
]
|
||||
},
|
||||
{
|
||||
"shape": "rect",
|
||||
"fill": "rgb(255,255,0)",
|
||||
"stroke": "rgba(255,255,0,0.5)",
|
||||
"x": "22",
|
||||
"y": "35",
|
||||
"width": "4",
|
||||
"height": "4"
|
||||
}
|
||||
],
|
||||
"item": true
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{
|
||||
"shape": "circle",
|
||||
"fill": "#80f",
|
||||
"stroke": "#88f",
|
||||
"cx": "24",
|
||||
"cy": "24",
|
||||
"r": "18"
|
||||
}
|
||||
],
|
||||
"item": true
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{
|
||||
"shape": "circle",
|
||||
"fill": "#4f4",
|
||||
"stroke": "#8f8",
|
||||
"cx": "24",
|
||||
"cy": "24",
|
||||
"r": "18"
|
||||
}
|
||||
],
|
||||
"item": true
|
||||
}
|
||||
]
|
||||
353  node_modules/@streamparser/json/samplejson/basic.json  generated  vendored  Normal file
@@ -0,0 +1,353 @@
|
||||
[
|
||||
{},
|
||||
{
|
||||
"image": [
|
||||
{
|
||||
"shape": "rect",
|
||||
"fill": "#333",
|
||||
"stroke": "#999",
|
||||
"x": 0.5e1,
|
||||
"y": 0.5,
|
||||
"z": 0.8,
|
||||
"w": 0.5e5,
|
||||
"u": 2e10,
|
||||
"foo": 2e1,
|
||||
"bar": 2,
|
||||
"width": 47,
|
||||
"height": 47
|
||||
}
|
||||
],
|
||||
"jumpable": 3,
|
||||
"solid": {
|
||||
"1": [2, 4],
|
||||
"2": [],
|
||||
"3": [2, 6],
|
||||
"4": [],
|
||||
"5": [2, 8, 1, 3, 7, 9, 4, 6],
|
||||
"6": [],
|
||||
"7": [4, 8],
|
||||
"8": [],
|
||||
"9": [6, 8]
|
||||
},
|
||||
"corners": { "1": true, "3": true, "7": true, "9": true }
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{
|
||||
"shape": "polygon",
|
||||
"fill": "#248",
|
||||
"stroke": "#48f",
|
||||
"points": [
|
||||
[0.5, 47.5],
|
||||
[47.5, 47.5],
|
||||
[47.5, 0.5]
|
||||
]
|
||||
}
|
||||
],
|
||||
"solid": {
|
||||
"1": [2, 4],
|
||||
"2": [1],
|
||||
"3": [2],
|
||||
"4": [],
|
||||
"5": [2, 8, 1, 3, 7, 9, 4, 6],
|
||||
"6": [],
|
||||
"7": [4, 8],
|
||||
"8": [],
|
||||
"9": [6, 8]
|
||||
},
|
||||
"corners": { "1": true, "3": true, "7": false, "9": true }
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{
|
||||
"shape": "polygon",
|
||||
"fill": "#248",
|
||||
"stroke": "#48f",
|
||||
"points": [
|
||||
[0.5, 0.5],
|
||||
[47.5, 47.5],
|
||||
[0.5, 47.5]
|
||||
]
|
||||
}
|
||||
],
|
||||
"solid": {
|
||||
"1": [2],
|
||||
"2": [3],
|
||||
"3": [2, 6],
|
||||
"4": [],
|
||||
"5": [2, 8, 1, 3, 7, 9, 4, 6],
|
||||
"6": [],
|
||||
"7": [4, 8],
|
||||
"8": [],
|
||||
"9": [6, 8]
|
||||
},
|
||||
"corners": { "1": true, "3": true, "7": true, "9": false }
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{
|
||||
"shape": "polygon",
|
||||
"fill": "#333",
|
||||
"stroke": "#999",
|
||||
"points": [
|
||||
[0.5, 0.5],
|
||||
[47.5, 47.5],
|
||||
[47.5, 0.5]
|
||||
]
|
||||
}
|
||||
],
|
||||
"jumpable": 3,
|
||||
"solid": {
|
||||
"1": [2, 4],
|
||||
"2": [],
|
||||
"3": [2, 6],
|
||||
"4": [],
|
||||
"5": [2, 8, 1, 3, 7, 9, 4, 6],
|
||||
"6": [3],
|
||||
"7": [4, 8],
|
||||
"8": [7],
|
||||
"9": [6, 8]
|
||||
},
|
||||
"corners": { "1": false, "3": true, "7": true, "9": true }
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{
|
||||
"shape": "polygon",
|
||||
"fill": "#333",
|
||||
"stroke": "#999",
|
||||
"points": [
|
||||
[0.5, 0.5],
|
||||
[0.5, 47.5],
|
||||
[47.5, 0.5]
|
||||
]
|
||||
}
|
||||
],
|
||||
"jumpable": 3,
|
||||
"solid": {
|
||||
"1": [2, 4],
|
||||
"2": [],
|
||||
"3": [2, 6],
|
||||
"4": [1],
|
||||
"5": [2, 8, 1, 3, 7, 9, 4, 6],
|
||||
"6": [],
|
||||
"7": [4, 8],
|
||||
"8": [9],
|
||||
"9": [6, 8]
|
||||
},
|
||||
"corners": { "1": true, "3": false, "7": true, "9": true }
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{
|
||||
"shape": "polygon",
|
||||
"fill": "#482",
|
||||
"stroke": "#8f4",
|
||||
"points": [
|
||||
[0.5, 47.5],
|
||||
[0.5, 23.5],
|
||||
[24.5, 23.5],
|
||||
[24.5, 0.5],
|
||||
[47.5, 0.5],
|
||||
[47.5, 47.5]
|
||||
]
|
||||
}
|
||||
],
|
||||
"jumpable": 3,
|
||||
"solid": {
|
||||
"1": [2, 4],
|
||||
"2": [],
|
||||
"3": [6, 2],
|
||||
"4": [],
|
||||
"5": [2, 8, 1, 3, 7, 9, 4, 6],
|
||||
"6": [9],
|
||||
"7": [4, 8],
|
||||
"8": [],
|
||||
"9": [6, 8]
|
||||
},
|
||||
"corners": { "1": true, "3": true, "7": false, "9": true }
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{
|
||||
"shape": "polygon",
|
||||
"fill": "#482",
|
||||
"stroke": "#8f4",
|
||||
"points": [
|
||||
[0.5, 0.5],
|
||||
[23.5, 0.5],
|
||||
[23.5, 24.5],
|
||||
[47.5, 24.5],
|
||||
[47.5, 47.5],
|
||||
[0.5, 47.5]
|
||||
]
|
||||
}
|
||||
],
|
||||
"jumpable": 3,
|
||||
"solid": {
|
||||
"1": [4, 2],
|
||||
"2": [],
|
||||
"3": [2, 6],
|
||||
"4": [7],
|
||||
"5": [2, 8, 1, 3, 7, 9, 4, 6],
|
||||
"6": [],
|
||||
"7": [4, 8],
|
||||
"8": [],
|
||||
"9": [6, 8]
|
||||
},
|
||||
"corners": { "1": true, "3": true, "7": true, "9": false }
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{
|
||||
"shape": "circle",
|
||||
"fill": "#ff0",
|
||||
"stroke": "#ff8",
|
||||
"cx": 24,
|
||||
"cy": 24,
|
||||
"r": 18
|
||||
}
|
||||
],
|
||||
"item": true
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{
|
||||
"shape": "polygon",
|
||||
"fill": "#842",
|
||||
"stroke": "#f84",
|
||||
"points": [
|
||||
[4.5, 0.5],
|
||||
[14.5, 0.5],
|
||||
[14.5, 17.5],
|
||||
[34, 17.5],
|
||||
[33.5, 0.5],
|
||||
[43.5, 0.5],
|
||||
[43.5, 47.5],
|
||||
[33.5, 47.5],
|
||||
[33.5, 30.5],
|
||||
[14.5, 30.5],
|
||||
[14.5, 47.5],
|
||||
[4.5, 47.5]
|
||||
]
|
||||
}
|
||||
],
|
||||
"jumpable": 3
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{
|
||||
"shape": "polygon",
|
||||
"fill": "#333",
|
||||
"stroke": "#999",
|
||||
"points": [
|
||||
[0.5, 0.5],
|
||||
[47.5, 0.5],
|
||||
[24, 47.5]
|
||||
]
|
||||
}
|
||||
],
|
||||
"jumpable": 3,
|
||||
"solid": {
|
||||
"1": [2, 4],
|
||||
"2": [],
|
||||
"3": [2, 6],
|
||||
"4": [1],
|
||||
"5": [2, 8, 1, 3, 7, 9, 4, 6],
|
||||
"6": [3],
|
||||
"7": [4, 8],
|
||||
"8": [],
|
||||
"9": [6, 8]
|
||||
},
|
||||
"corners": { "1": false, "3": false, "7": true, "9": true }
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{
|
||||
"shape": "rect",
|
||||
"fill": "#114acb",
|
||||
"x": 0.5,
|
||||
"y": 0.5,
|
||||
"width": 47,
|
||||
"height": 47
|
||||
},
|
||||
{
|
||||
"shape": "polygon",
|
||||
"fill": "rgba(255,255,255,0.30)",
|
||||
"points": [
|
||||
[0.5, 0.5],
|
||||
[47.5, 0.5],
|
||||
[40, 8],
|
||||
[8, 8],
|
||||
[8, 40],
|
||||
[0.5, 47.5]
|
||||
]
|
||||
},
|
||||
{
|
||||
"shape": "polygon",
|
||||
"fill": "rgba(0,0,0,0.30)",
|
||||
"points": [
|
||||
[47.5, 0.5],
|
||||
[48, 48],
|
||||
[0.5, 47.5],
|
||||
[8, 40],
|
||||
[40, 40],
|
||||
[40, 8]
|
||||
]
|
||||
},
|
||||
{
|
||||
"shape": "polygon",
|
||||
"fill": "rgb(255,255,0)",
|
||||
"stroke": "rgba(255,255,0,0.5)",
|
||||
"points": [
|
||||
[24, 9],
|
||||
[35, 20],
|
||||
[26, 29],
|
||||
[26, 33],
|
||||
[22, 33],
|
||||
[22, 27],
|
||||
[29, 20],
|
||||
[24, 15],
|
||||
[16, 23],
|
||||
[13, 20]
|
||||
]
|
||||
},
|
||||
{
|
||||
"shape": "rect",
|
||||
"fill": "rgb(255,255,0)",
|
||||
"stroke": "rgba(255,255,0,0.5)",
|
||||
"x": 22,
|
||||
"y": 35,
|
||||
"width": 4,
|
||||
"height": 4
|
||||
}
|
||||
],
|
||||
"item": true
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{
|
||||
"shape": "circle",
|
||||
"fill": "#80f",
|
||||
"stroke": "#88f",
|
||||
"cx": 24,
|
||||
"cy": 24,
|
||||
"r": 18
|
||||
}
|
||||
],
|
||||
"item": true
|
||||
},
|
||||
{
|
||||
"image": [
|
||||
{
|
||||
"shape": "circle",
|
||||
"fill": "#4f4",
|
||||
"stroke": "#8f8",
|
||||
"cx": 24,
|
||||
"cy": 24,
|
||||
"r": 18
|
||||
}
|
||||
],
|
||||
"item": true
|
||||
}
|
||||
]
|
||||
5  node_modules/@streamparser/json/src/index.ts  generated  vendored  Normal file
@@ -0,0 +1,5 @@
|
||||
export { default as Tokenizer } from "./tokenizer";
|
||||
export { default as TokenParser } from "./tokenparser";
|
||||
export { default as JSONParser } from "./jsonparser";
|
||||
export * as utf8 from "./utils/utf-8";
|
||||
export { TokenType } from "./utils/constants";
|
||||
65  node_modules/@streamparser/json/src/jsonparser.ts  generated  vendored  Normal file
@@ -0,0 +1,65 @@
|
||||
import Tokenizer, { TokenizerOptions } from "./tokenizer";
|
||||
import TokenParser, { StackElement, TokenParserOptions } from "./tokenparser";
|
||||
import { JsonPrimitive, JsonKey, JsonStruct } from "./utils/types";
|
||||
|
||||
interface JSONParserOpts extends TokenizerOptions, TokenParserOptions {}
|
||||
|
||||
export default class JSONParser {
|
||||
private tokenizer: Tokenizer;
|
||||
private tokenParser: TokenParser;
|
||||
|
||||
constructor(opts: JSONParserOpts = {}) {
|
||||
this.tokenizer = new Tokenizer(opts);
|
||||
this.tokenParser = new TokenParser(opts);
|
||||
|
||||
this.tokenizer.onToken = this.tokenParser.write.bind(this.tokenParser);
|
||||
this.tokenizer.onEnd = () => {
|
||||
if (!this.tokenParser.isEnded) this.tokenParser.end();
|
||||
};
|
||||
|
||||
this.tokenParser.onError = this.tokenizer.error.bind(this.tokenizer);
|
||||
this.tokenParser.onEnd = () => {
|
||||
if (!this.tokenizer.isEnded) this.tokenizer.end();
|
||||
};
|
||||
}
|
||||
|
||||
public get isEnded(): boolean {
|
||||
return this.tokenizer.isEnded && this.tokenParser.isEnded;
|
||||
}
|
||||
|
||||
public write(input: Iterable<number> | string): void {
|
||||
this.tokenizer.write(input);
|
||||
}
|
||||
|
||||
public end(): void {
|
||||
this.tokenizer.end();
|
||||
}
|
||||
|
||||
public set onToken(
|
||||
cb: (token: number, value: JsonPrimitive, offset: number) => void
|
||||
) {
|
||||
this.tokenizer.onToken = cb;
|
||||
}
|
||||
|
||||
public set onValue(
|
||||
cb: (
|
||||
value: JsonPrimitive | JsonStruct,
|
||||
key: JsonKey | undefined,
|
||||
parent: JsonStruct | undefined,
|
||||
stack: StackElement[]
|
||||
) => void
|
||||
) {
|
||||
this.tokenParser.onValue = cb;
|
||||
}
|
||||
|
||||
public set onError(cb: (err: Error) => void) {
|
||||
this.tokenizer.onError = cb;
|
||||
}
|
||||
|
||||
public set onEnd(cb: () => void) {
|
||||
this.tokenParser.onEnd = () => {
|
||||
if (!this.tokenizer.isEnded) this.tokenizer.end();
|
||||
cb.call(this.tokenParser);
|
||||
};
|
||||
}
|
||||
}
|
||||
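Because JSONParser hands the same options object to both the Tokenizer and the TokenParser, stream-level settings such as separator and the buffer sizes are configured once. The sketch below parses newline-delimited JSON this way; the exact reset behaviour between documents is an assumption based on the separator handling visible in the tokenizer below, not on package documentation:

import { JSONParser } from "@streamparser/json";

// separator marks document boundaries; stringBufferSize > 4 enables the BufferedString path.
const parser = new JSONParser({ separator: "\n", stringBufferSize: 64 });

parser.onValue = (value, key, parent, stack) => {
  if (stack.length === 0) console.log("record:", value);
};

parser.write('{"id": 1}\n');
parser.write('{"id": 2}');
parser.end();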
678  node_modules/@streamparser/json/src/tokenizer.ts  generated  vendored  Normal file
@@ -0,0 +1,678 @@
|
||||
import { charset, escapedSequences } from "./utils/utf-8";
|
||||
import {
|
||||
StringBuilder,
|
||||
NonBufferedString,
|
||||
BufferedString,
|
||||
} from "./utils/bufferedString";
|
||||
import { TokenType } from "./utils/constants";
|
||||
import { JsonPrimitive } from "./utils/types";
|
||||
|
||||
const {
|
||||
LEFT_BRACE,
|
||||
RIGHT_BRACE,
|
||||
LEFT_BRACKET,
|
||||
RIGHT_BRACKET,
|
||||
COLON,
|
||||
COMMA,
|
||||
TRUE,
|
||||
FALSE,
|
||||
NULL,
|
||||
STRING,
|
||||
NUMBER,
|
||||
} = TokenType;
|
||||
|
||||
// Tokenizer States
|
||||
enum TokenizerStates {
|
||||
START,
|
||||
ENDED,
|
||||
ERROR,
|
||||
TRUE1,
|
||||
TRUE2,
|
||||
TRUE3,
|
||||
FALSE1,
|
||||
FALSE2,
|
||||
FALSE3,
|
||||
FALSE4,
|
||||
NULL1,
|
||||
NULL2,
|
||||
NULL3,
|
||||
STRING_DEFAULT,
|
||||
STRING_AFTER_BACKSLASH,
|
||||
STRING_UNICODE_DIGIT_1,
|
||||
STRING_UNICODE_DIGIT_2,
|
||||
STRING_UNICODE_DIGIT_3,
|
||||
STRING_UNICODE_DIGIT_4,
|
||||
STRING_INCOMPLETE_CHAR,
|
||||
NUMBER_AFTER_INITIAL_MINUS,
|
||||
NUMBER_AFTER_INITIAL_ZERO,
|
||||
NUMBER_AFTER_INITIAL_NON_ZERO,
|
||||
NUMBER_AFTER_FULL_STOP,
|
||||
NUMBER_AFTER_DECIMAL,
|
||||
NUMBER_AFTER_E,
|
||||
NUMBER_AFTER_E_AND_SIGN,
|
||||
NUMBER_AFTER_E_AND_DIGIT,
|
||||
SEPARATOR,
|
||||
}
|
||||
|
||||
export interface TokenizerOptions {
|
||||
stringBufferSize?: number;
|
||||
numberBufferSize?: number;
|
||||
separator?: string;
|
||||
}
|
||||
|
||||
const defaultOpts: TokenizerOptions = {
|
||||
stringBufferSize: 0,
|
||||
numberBufferSize: 0,
|
||||
separator: undefined,
|
||||
};
|
||||
|
||||
export class TokenizerError extends Error {
|
||||
constructor(message: string) {
|
||||
super(message);
|
||||
// Typescript is broken. This is a workaround
|
||||
Object.setPrototypeOf(this, TokenizerError.prototype);
|
||||
}
|
||||
}
|
||||
|
||||
export default class Tokenizer {
|
||||
private state = TokenizerStates.START;
|
||||
|
||||
private separator?: string;
|
||||
private separatorBytes?: Uint8Array;
|
||||
private separatorIndex = 0;
|
||||
private bufferedString: StringBuilder;
|
||||
private bufferedNumber: StringBuilder;
|
||||
|
||||
private unicode: string | undefined = undefined; // unicode escapes
|
||||
private highSurrogate: number | undefined = undefined;
|
||||
private bytes_remaining = 0; // number of bytes remaining in multi byte utf8 char to read after split boundary
|
||||
private bytes_in_sequence = 0; // bytes in multi byte utf8 char to read
|
||||
private char_split_buffer = new Uint8Array(4); // for rebuilding chars split before boundary is reached
|
||||
private encoder = new TextEncoder();
|
||||
private offset = -1;
|
||||
|
||||
constructor(opts?: TokenizerOptions) {
|
||||
opts = { ...defaultOpts, ...opts };
|
||||
|
||||
this.bufferedString =
|
||||
opts.stringBufferSize && opts.stringBufferSize > 4
|
||||
? new BufferedString(opts.stringBufferSize)
|
||||
: new NonBufferedString();
|
||||
this.bufferedNumber =
|
||||
opts.numberBufferSize && opts.numberBufferSize > 0
|
||||
? new BufferedString(opts.numberBufferSize)
|
||||
: new NonBufferedString();
|
||||
|
||||
this.separator = opts.separator;
|
||||
this.separatorBytes = opts.separator
|
||||
? this.encoder.encode(opts.separator)
|
||||
: undefined;
|
||||
}
|
||||
|
||||
public get isEnded(): boolean {
|
||||
return this.state === TokenizerStates.ENDED;
|
||||
}
|
||||
|
||||
public write(input: Iterable<number> | string): void {
|
||||
let buffer: Uint8Array;
|
||||
if (input instanceof Uint8Array) {
|
||||
buffer = input;
|
||||
} else if (typeof input === "string") {
|
||||
buffer = this.encoder.encode(input);
|
||||
} else if (
|
||||
(typeof input === "object" && "buffer" in input) ||
|
||||
Array.isArray(input)
|
||||
) {
|
||||
buffer = Uint8Array.from(input);
|
||||
} else {
|
||||
this.error(
|
||||
new TypeError(
|
||||
"Unexpected type. The `write` function only accepts Arrays, TypedArrays and Strings."
|
||||
)
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
for (let i = 0; i < buffer.length; i += 1) {
|
||||
const n = buffer[i]; // get current byte from buffer
|
||||
switch (this.state) {
|
||||
case TokenizerStates.START:
|
||||
this.offset += 1;
|
||||
|
||||
if (this.separatorBytes && n === this.separatorBytes[0]) {
|
||||
if (this.separatorBytes.length === 1) {
|
||||
this.state = TokenizerStates.START;
|
||||
this.onToken(
|
||||
TokenType.SEPARATOR,
|
||||
this.separator as string,
|
||||
this.offset + this.separatorBytes.length - 1
|
||||
);
|
||||
continue;
|
||||
}
|
||||
this.state = TokenizerStates.SEPARATOR;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (
|
||||
n === charset.SPACE ||
|
||||
n === charset.NEWLINE ||
|
||||
n === charset.CARRIAGE_RETURN ||
|
||||
n === charset.TAB
|
||||
) {
|
||||
// whitespace
|
||||
continue;
|
||||
}
|
||||
|
||||
if (n === charset.LEFT_CURLY_BRACKET) {
|
||||
this.onToken(LEFT_BRACE, "{", this.offset);
|
||||
continue;
|
||||
}
|
||||
if (n === charset.RIGHT_CURLY_BRACKET) {
|
||||
this.onToken(RIGHT_BRACE, "}", this.offset);
|
||||
continue;
|
||||
}
|
||||
if (n === charset.LEFT_SQUARE_BRACKET) {
|
||||
this.onToken(LEFT_BRACKET, "[", this.offset);
|
||||
continue;
|
||||
}
|
||||
if (n === charset.RIGHT_SQUARE_BRACKET) {
|
||||
this.onToken(RIGHT_BRACKET, "]", this.offset);
|
||||
continue;
|
||||
}
|
||||
if (n === charset.COLON) {
|
||||
this.onToken(COLON, ":", this.offset);
|
||||
continue;
|
||||
}
|
||||
if (n === charset.COMMA) {
|
||||
this.onToken(COMMA, ",", this.offset);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (n === charset.LATIN_SMALL_LETTER_T) {
|
||||
this.state = TokenizerStates.TRUE1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (n === charset.LATIN_SMALL_LETTER_F) {
|
||||
this.state = TokenizerStates.FALSE1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (n === charset.LATIN_SMALL_LETTER_N) {
|
||||
this.state = TokenizerStates.NULL1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (n === charset.QUOTATION_MARK) {
|
||||
this.bufferedString.reset();
|
||||
this.state = TokenizerStates.STRING_DEFAULT;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (n >= charset.DIGIT_ONE && n <= charset.DIGIT_NINE) {
|
||||
this.bufferedNumber.reset();
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_INITIAL_NON_ZERO;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (n === charset.DIGIT_ZERO) {
|
||||
this.bufferedNumber.reset();
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_INITIAL_ZERO;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (n === charset.HYPHEN_MINUS) {
|
||||
this.bufferedNumber.reset();
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_INITIAL_MINUS;
|
||||
continue;
|
||||
}
|
||||
|
||||
break;
|
||||
// STRING
|
||||
case TokenizerStates.STRING_DEFAULT:
|
||||
if (n === charset.QUOTATION_MARK) {
|
||||
const string = this.bufferedString.toString();
|
||||
this.state = TokenizerStates.START;
|
||||
this.onToken(STRING, string, this.offset);
|
||||
this.offset += this.bufferedString.byteLength + 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (n === charset.REVERSE_SOLIDUS) {
|
||||
this.state = TokenizerStates.STRING_AFTER_BACKSLASH;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (n >= 128) {
|
||||
// Parse multi byte (>=128) chars one at a time
|
||||
if (n >= 194 && n <= 223) {
|
||||
this.bytes_in_sequence = 2;
|
||||
} else if (n <= 239) {
|
||||
this.bytes_in_sequence = 3;
|
||||
} else {
|
||||
this.bytes_in_sequence = 4;
|
||||
}
|
||||
|
||||
if (this.bytes_in_sequence <= buffer.length - i) {
|
||||
// if bytes needed to complete char fall outside buffer length, we have a boundary split
|
||||
this.bufferedString.appendBuf(
|
||||
buffer,
|
||||
i,
|
||||
i + this.bytes_in_sequence
|
||||
);
|
||||
i += this.bytes_in_sequence - 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
this.bytes_remaining = i + this.bytes_in_sequence - buffer.length;
|
||||
this.char_split_buffer.set(buffer.subarray(i));
|
||||
i = buffer.length - 1;
|
||||
this.state = TokenizerStates.STRING_INCOMPLETE_CHAR;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (n >= charset.SPACE) {
|
||||
this.bufferedString.appendChar(n);
|
||||
continue;
|
||||
}
|
||||
|
||||
break;
|
||||
case TokenizerStates.STRING_INCOMPLETE_CHAR:
|
||||
// check for carry over of a multi byte char split between data chunks
|
||||
// & fill temp buffer it with start of this data chunk up to the boundary limit set in the last iteration
|
||||
this.char_split_buffer.set(
|
||||
buffer.subarray(i, i + this.bytes_remaining),
|
||||
this.bytes_in_sequence - this.bytes_remaining
|
||||
);
|
||||
this.bufferedString.appendBuf(
|
||||
this.char_split_buffer,
|
||||
0,
|
||||
this.bytes_in_sequence
|
||||
);
|
||||
i = this.bytes_remaining - 1;
|
||||
this.state = TokenizerStates.STRING_DEFAULT;
|
||||
continue;
|
||||
case TokenizerStates.STRING_AFTER_BACKSLASH:
|
||||
const controlChar = escapedSequences[n];
|
||||
if (controlChar) {
|
||||
this.bufferedString.appendChar(controlChar);
|
||||
this.state = TokenizerStates.STRING_DEFAULT;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (n === charset.LATIN_SMALL_LETTER_U) {
|
||||
this.unicode = "";
|
||||
this.state = TokenizerStates.STRING_UNICODE_DIGIT_1;
|
||||
continue;
|
||||
}
|
||||
|
||||
break;
|
||||
case TokenizerStates.STRING_UNICODE_DIGIT_1:
|
||||
case TokenizerStates.STRING_UNICODE_DIGIT_2:
|
||||
case TokenizerStates.STRING_UNICODE_DIGIT_3:
|
||||
if (
|
||||
(n >= charset.DIGIT_ZERO && n <= charset.DIGIT_NINE) ||
|
||||
(n >= charset.LATIN_CAPITAL_LETTER_A &&
|
||||
n <= charset.LATIN_CAPITAL_LETTER_F) ||
|
||||
(n >= charset.LATIN_SMALL_LETTER_A &&
|
||||
n <= charset.LATIN_SMALL_LETTER_F)
|
||||
) {
|
||||
this.unicode += String.fromCharCode(n);
|
||||
this.state += 1;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.STRING_UNICODE_DIGIT_4:
|
||||
if (
|
||||
(n >= charset.DIGIT_ZERO && n <= charset.DIGIT_NINE) ||
|
||||
(n >= charset.LATIN_CAPITAL_LETTER_A &&
|
||||
n <= charset.LATIN_CAPITAL_LETTER_F) ||
|
||||
(n >= charset.LATIN_SMALL_LETTER_A &&
|
||||
n <= charset.LATIN_SMALL_LETTER_F)
|
||||
) {
|
||||
const intVal = parseInt(this.unicode + String.fromCharCode(n), 16);
|
||||
if (this.highSurrogate === undefined) {
|
||||
if (intVal >= 0xd800 && intVal <= 0xdbff) {
|
||||
//<55296,56319> - highSurrogate
|
||||
this.highSurrogate = intVal;
|
||||
} else {
|
||||
this.bufferedString.appendBuf(
|
||||
this.encoder.encode(String.fromCharCode(intVal))
|
||||
);
|
||||
}
|
||||
} else {
|
||||
if (intVal >= 0xdc00 && intVal <= 0xdfff) {
|
||||
//<56320,57343> - lowSurrogate
|
||||
this.bufferedString.appendBuf(
|
||||
this.encoder.encode(
|
||||
String.fromCharCode(this.highSurrogate, intVal)
|
||||
)
|
||||
);
|
||||
} else {
|
||||
this.bufferedString.appendBuf(
|
||||
this.encoder.encode(String.fromCharCode(this.highSurrogate))
|
||||
);
|
||||
}
|
||||
this.highSurrogate = undefined;
|
||||
}
|
||||
this.state = TokenizerStates.STRING_DEFAULT;
|
||||
continue;
|
||||
}
|
||||
// Number
|
||||
case TokenizerStates.NUMBER_AFTER_INITIAL_MINUS:
|
||||
if (n === charset.DIGIT_ZERO) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_INITIAL_ZERO;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (n >= charset.DIGIT_ONE && n <= charset.DIGIT_NINE) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_INITIAL_NON_ZERO;
|
||||
continue;
|
||||
}
|
||||
|
||||
break;
|
||||
case TokenizerStates.NUMBER_AFTER_INITIAL_ZERO:
|
||||
if (n === charset.FULL_STOP) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_FULL_STOP;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (
|
||||
n === charset.LATIN_SMALL_LETTER_E ||
|
||||
n === charset.LATIN_CAPITAL_LETTER_E
|
||||
) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_E;
|
||||
continue;
|
||||
}
|
||||
|
||||
i -= 1;
|
||||
this.state = TokenizerStates.START;
|
||||
this.emitNumber();
|
||||
continue;
|
||||
case TokenizerStates.NUMBER_AFTER_INITIAL_NON_ZERO:
|
||||
if (n >= charset.DIGIT_ZERO && n <= charset.DIGIT_NINE) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (n === charset.FULL_STOP) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_FULL_STOP;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (
|
||||
n === charset.LATIN_SMALL_LETTER_E ||
|
||||
n === charset.LATIN_CAPITAL_LETTER_E
|
||||
) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_E;
|
||||
continue;
|
||||
}
|
||||
|
||||
i -= 1;
|
||||
this.state = TokenizerStates.START;
|
||||
this.emitNumber();
|
||||
continue;
|
||||
case TokenizerStates.NUMBER_AFTER_FULL_STOP:
|
||||
if (n >= charset.DIGIT_ZERO && n <= charset.DIGIT_NINE) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_DECIMAL;
|
||||
continue;
|
||||
}
|
||||
|
||||
break;
|
||||
case TokenizerStates.NUMBER_AFTER_DECIMAL:
|
||||
if (n >= charset.DIGIT_ZERO && n <= charset.DIGIT_NINE) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (
|
||||
n === charset.LATIN_SMALL_LETTER_E ||
|
||||
n === charset.LATIN_CAPITAL_LETTER_E
|
||||
) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_E;
|
||||
continue;
|
||||
}
|
||||
|
||||
i -= 1;
|
||||
this.state = TokenizerStates.START;
|
||||
this.emitNumber();
|
||||
continue;
|
||||
case TokenizerStates.NUMBER_AFTER_E:
|
||||
if (n === charset.PLUS_SIGN || n === charset.HYPHEN_MINUS) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_E_AND_SIGN;
|
||||
continue;
|
||||
}
|
||||
// Allow cascading
|
||||
case TokenizerStates.NUMBER_AFTER_E_AND_SIGN:
|
||||
if (n >= charset.DIGIT_ZERO && n <= charset.DIGIT_NINE) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
this.state = TokenizerStates.NUMBER_AFTER_E_AND_DIGIT;
|
||||
continue;
|
||||
}
|
||||
|
||||
break;
|
||||
case TokenizerStates.NUMBER_AFTER_E_AND_DIGIT:
|
||||
if (n >= charset.DIGIT_ZERO && n <= charset.DIGIT_NINE) {
|
||||
this.bufferedNumber.appendChar(n);
|
||||
continue;
|
||||
}
|
||||
|
||||
i -= 1;
|
||||
this.state = TokenizerStates.START;
|
||||
this.emitNumber();
|
||||
continue;
|
||||
// TRUE
|
||||
case TokenizerStates.TRUE1:
|
||||
if (n === charset.LATIN_SMALL_LETTER_R) {
|
||||
this.state = TokenizerStates.TRUE2;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.TRUE2:
|
||||
if (n === charset.LATIN_SMALL_LETTER_U) {
|
||||
this.state = TokenizerStates.TRUE3;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.TRUE3:
|
||||
if (n === charset.LATIN_SMALL_LETTER_E) {
|
||||
this.state = TokenizerStates.START;
|
||||
this.onToken(TRUE, true, this.offset);
|
||||
this.offset += 3;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
// FALSE
|
||||
case TokenizerStates.FALSE1:
|
||||
if (n === charset.LATIN_SMALL_LETTER_A) {
|
||||
this.state = TokenizerStates.FALSE2;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.FALSE2:
|
||||
if (n === charset.LATIN_SMALL_LETTER_L) {
|
||||
this.state = TokenizerStates.FALSE3;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.FALSE3:
|
||||
if (n === charset.LATIN_SMALL_LETTER_S) {
|
||||
this.state = TokenizerStates.FALSE4;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.FALSE4:
|
||||
if (n === charset.LATIN_SMALL_LETTER_E) {
|
||||
this.state = TokenizerStates.START;
|
||||
this.onToken(FALSE, false, this.offset);
|
||||
this.offset += 4;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
// NULL
|
||||
case TokenizerStates.NULL1:
|
||||
if (n === charset.LATIN_SMALL_LETTER_U) {
|
||||
this.state = TokenizerStates.NULL2;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.NULL2:
|
||||
if (n === charset.LATIN_SMALL_LETTER_L) {
|
||||
this.state = TokenizerStates.NULL3;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.NULL3:
|
||||
if (n === charset.LATIN_SMALL_LETTER_L) {
|
||||
this.state = TokenizerStates.START;
|
||||
this.onToken(NULL, null, this.offset);
|
||||
this.offset += 3;
|
||||
continue;
|
||||
}
|
||||
break;
|
||||
case TokenizerStates.SEPARATOR:
|
||||
this.separatorIndex += 1;
|
||||
if (
|
||||
!this.separatorBytes ||
|
||||
n !== this.separatorBytes[this.separatorIndex]
|
||||
) {
|
||||
break;
|
||||
}
|
||||
if (this.separatorIndex === this.separatorBytes.length - 1) {
|
||||
this.state = TokenizerStates.START;
|
||||
this.onToken(
|
||||
TokenType.SEPARATOR,
|
||||
this.separator as string,
|
||||
this.offset + this.separatorIndex
|
||||
);
|
||||
this.separatorIndex = 0;
|
||||
}
|
||||
continue;
|
||||
case TokenizerStates.ENDED:
|
||||
if (
|
||||
n === charset.SPACE ||
|
||||
n === charset.NEWLINE ||
|
||||
n === charset.CARRIAGE_RETURN ||
|
||||
n === charset.TAB
|
||||
) {
|
||||
// whitespace
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
this.error(
|
||||
new TokenizerError(
|
||||
`Unexpected "${String.fromCharCode(n)}" at position "${i}" in state ${
|
||||
TokenizerStates[this.state]
|
||||
}`
|
||||
)
|
||||
);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
private emitNumber(): void {
|
||||
this.onToken(
|
||||
NUMBER,
|
||||
this.parseNumber(this.bufferedNumber.toString()),
|
||||
this.offset
|
||||
);
|
||||
this.offset += this.bufferedNumber.byteLength - 1;
|
||||
}
|
||||
|
||||
protected parseNumber(numberStr: string): number {
|
||||
return Number(numberStr);
|
||||
}
|
||||
|
||||
public error(err: Error): void {
|
||||
if (this.state !== TokenizerStates.ENDED) {
|
||||
this.state = TokenizerStates.ERROR;
|
||||
}
|
||||
|
||||
this.onError(err);
|
||||
}
|
||||
|
||||
public end(): void {
|
||||
switch (this.state) {
|
||||
case TokenizerStates.NUMBER_AFTER_INITIAL_ZERO:
|
||||
case TokenizerStates.NUMBER_AFTER_INITIAL_NON_ZERO:
|
||||
case TokenizerStates.NUMBER_AFTER_DECIMAL:
|
||||
case TokenizerStates.NUMBER_AFTER_E_AND_DIGIT:
|
||||
this.state = TokenizerStates.ENDED;
|
||||
this.emitNumber();
|
||||
this.onEnd();
|
||||
break;
|
||||
case TokenizerStates.START:
|
||||
case TokenizerStates.ERROR:
|
||||
case TokenizerStates.SEPARATOR:
|
||||
this.state = TokenizerStates.ENDED;
|
||||
this.onEnd();
|
||||
break;
|
||||
default:
|
||||
this.error(
|
||||
new TokenizerError(
|
||||
`Tokenizer ended in the middle of a token (state: ${
|
||||
TokenizerStates[this.state]
|
||||
}). Either not all the data was received or the data was invalid.`
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
public onToken(token: TokenType.LEFT_BRACE, value: "{", offset: number): void;
|
||||
public onToken(
|
||||
token: TokenType.RIGHT_BRACE,
|
||||
value: "}",
|
||||
offset: number
|
||||
): void;
|
||||
public onToken(
|
||||
token: TokenType.LEFT_BRACKET,
|
||||
value: "[",
|
||||
offset: number
|
||||
): void;
|
||||
public onToken(
|
||||
token: TokenType.RIGHT_BRACKET,
|
||||
value: "]",
|
||||
offset: number
|
||||
): void;
|
||||
public onToken(token: TokenType.COLON, value: ":", offset: number): void;
|
||||
public onToken(token: TokenType.COMMA, value: ",", offset: number): void;
|
||||
public onToken(token: TokenType.TRUE, value: true, offset: number): void;
|
||||
public onToken(token: TokenType.FALSE, value: false, offset: number): void;
|
||||
public onToken(token: TokenType.NULL, value: null, offset: number): void;
|
||||
public onToken(token: TokenType.STRING, value: string, offset: number): void;
|
||||
public onToken(token: TokenType.NUMBER, value: number, offset: number): void;
|
||||
public onToken(
|
||||
token: TokenType.SEPARATOR,
|
||||
value: string,
|
||||
offset: number
|
||||
): void;
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
public onToken(token: TokenType, value: JsonPrimitive, offset: number): void {
|
||||
// Override me
|
||||
throw new TokenizerError(
|
||||
'Can\'t emit tokens before the "onToken" callback has been set up.'
|
||||
);
|
||||
}
|
||||
|
||||
public onError(err: Error): void {
|
||||
// Override me
|
||||
throw err;
|
||||
}
|
||||
|
||||
public onEnd(): void {
|
||||
// Override me
|
||||
}
|
||||
}
390  node_modules/@streamparser/json/src/tokenparser.ts  (generated, vendored, new file)
@@ -0,0 +1,390 @@
import { TokenType } from "./utils/constants";
|
||||
import {
|
||||
JsonPrimitive,
|
||||
JsonKey,
|
||||
JsonObject,
|
||||
JsonArray,
|
||||
JsonStruct,
|
||||
} from "./utils/types";
|
||||
|
||||
const {
|
||||
LEFT_BRACE,
|
||||
RIGHT_BRACE,
|
||||
LEFT_BRACKET,
|
||||
RIGHT_BRACKET,
|
||||
COLON,
|
||||
COMMA,
|
||||
TRUE,
|
||||
FALSE,
|
||||
NULL,
|
||||
STRING,
|
||||
NUMBER,
|
||||
SEPARATOR,
|
||||
} = TokenType;
|
||||
|
||||
// Parser States
|
||||
enum TokenParserState {
|
||||
VALUE,
|
||||
KEY,
|
||||
COLON,
|
||||
COMMA,
|
||||
ENDED,
|
||||
ERROR,
|
||||
SEPARATOR,
|
||||
}
|
||||
// Parser Modes
|
||||
export enum TokenParserMode {
|
||||
OBJECT,
|
||||
ARRAY,
|
||||
}
|
||||
|
||||
export interface StackElement {
|
||||
key: JsonKey;
|
||||
value: JsonStruct;
|
||||
mode: TokenParserMode | undefined;
|
||||
emit: boolean;
|
||||
}
|
||||
|
||||
export interface TokenParserOptions {
|
||||
paths?: string[];
|
||||
keepStack?: boolean;
|
||||
separator?: string;
|
||||
}
|
||||
|
||||
const defaultOpts: TokenParserOptions = {
|
||||
paths: undefined,
|
||||
keepStack: true,
|
||||
separator: undefined,
|
||||
};
|
||||
|
||||
export class TokenParserError extends Error {
|
||||
constructor(message: string) {
|
||||
super(message);
|
||||
// Typescript is broken. This is a workaround
|
||||
Object.setPrototypeOf(this, TokenParserError.prototype);
|
||||
}
|
||||
}
|
||||
|
||||
export default class TokenParser {
|
||||
private readonly paths?: (string[] | undefined)[];
|
||||
private readonly keepStack: boolean;
|
||||
private readonly separator?: string;
|
||||
private state: TokenParserState = TokenParserState.VALUE;
|
||||
private mode: TokenParserMode | undefined = undefined;
|
||||
private key: JsonKey = undefined;
|
||||
private value: JsonPrimitive | JsonStruct | undefined = undefined;
|
||||
private stack: StackElement[] = [];
|
||||
|
||||
constructor(opts?: TokenParserOptions) {
|
||||
opts = { ...defaultOpts, ...opts };
|
||||
|
||||
if (opts.paths) {
|
||||
this.paths = opts.paths.map((path) => {
|
||||
if (path === undefined || path === "$*") return undefined;
|
||||
|
||||
if (!path.startsWith("$"))
|
||||
throw new TokenParserError(
|
||||
`Invalid selector "${path}". Should start with "$".`
|
||||
);
|
||||
const pathParts = path.split(".").slice(1);
|
||||
if (pathParts.includes(""))
|
||||
throw new TokenParserError(
|
||||
`Invalid selector "${path}". ".." syntax not supported.`
|
||||
);
|
||||
return pathParts;
|
||||
});
|
||||
}
|
||||
|
||||
this.keepStack = opts.keepStack as boolean;
|
||||
this.separator = opts.separator;
|
||||
}
|
||||
|
||||
private shouldEmit(): boolean {
|
||||
if (!this.paths) return true;
|
||||
|
||||
return this.paths.some((path) => {
|
||||
if (path === undefined) return true;
|
||||
if (path.length !== this.stack.length) return false;
|
||||
|
||||
for (let i = 0; i < path.length - 1; i++) {
|
||||
const selector = path[i];
|
||||
const key = this.stack[i + 1].key;
|
||||
if (selector === "*") continue;
|
||||
if (selector !== key) return false;
|
||||
}
|
||||
|
||||
const selector = path[path.length - 1];
|
||||
if (selector === "*") return true;
|
||||
return selector === this.key?.toString();
|
||||
});
|
||||
}
|
||||
|
||||
private push(): void {
|
||||
this.stack.push({
|
||||
key: this.key,
|
||||
value: this.value as JsonStruct,
|
||||
mode: this.mode,
|
||||
emit: this.shouldEmit(),
|
||||
});
|
||||
}
|
||||
|
||||
private pop(): void {
|
||||
const value = this.value;
|
||||
|
||||
let emit;
|
||||
({
|
||||
key: this.key,
|
||||
value: this.value,
|
||||
mode: this.mode,
|
||||
emit,
|
||||
} = this.stack.pop() as StackElement);
|
||||
|
||||
this.state =
|
||||
this.mode !== undefined ? TokenParserState.COMMA : TokenParserState.VALUE;
|
||||
|
||||
this.emit(value as JsonPrimitive | JsonStruct, emit);
|
||||
}
|
||||
|
||||
private emit(value: JsonPrimitive | JsonStruct, emit: boolean): void {
|
||||
if (
|
||||
!this.keepStack &&
|
||||
this.value &&
|
||||
this.stack.every((item) => !item.emit)
|
||||
) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
delete (this.value as JsonStruct as any)[this.key as string | number];
|
||||
}
|
||||
|
||||
if (emit) {
|
||||
this.onValue(
|
||||
value,
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
this.key as JsonKey as any,
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
this.value as JsonStruct | undefined as any,
|
||||
this.stack
|
||||
);
|
||||
}
|
||||
|
||||
if (this.stack.length === 0) {
|
||||
if (this.separator) {
|
||||
this.state = TokenParserState.SEPARATOR;
|
||||
} else if (this.separator === undefined) {
|
||||
this.end();
|
||||
}
|
||||
// else if separator === '', expect next JSON object.
|
||||
}
|
||||
}
|
||||
|
||||
public get isEnded(): boolean {
|
||||
return this.state === TokenParserState.ENDED;
|
||||
}
|
||||
|
||||
public write(token: TokenType.LEFT_BRACE, value: "{"): void;
|
||||
public write(token: TokenType.RIGHT_BRACE, value: "}"): void;
|
||||
public write(token: TokenType.LEFT_BRACKET, value: "["): void;
|
||||
public write(token: TokenType.RIGHT_BRACKET, value: "]"): void;
|
||||
public write(token: TokenType.COLON, value: ":"): void;
|
||||
public write(token: TokenType.COMMA, value: ","): void;
|
||||
public write(token: TokenType.TRUE, value: true): void;
|
||||
public write(token: TokenType.FALSE, value: false): void;
|
||||
public write(token: TokenType.NULL, value: null): void;
|
||||
public write(token: TokenType.STRING, value: string): void;
|
||||
public write(token: TokenType.NUMBER, value: number): void;
|
||||
public write(token: TokenType.SEPARATOR, value: string): void;
|
||||
public write(token: TokenType, value: JsonPrimitive): void {
|
||||
if (this.state === TokenParserState.VALUE) {
|
||||
if (
|
||||
token === STRING ||
|
||||
token === NUMBER ||
|
||||
token === TRUE ||
|
||||
token === FALSE ||
|
||||
token === NULL
|
||||
) {
|
||||
if (this.mode === TokenParserMode.OBJECT) {
|
||||
(this.value as JsonObject)[this.key as string] = value;
|
||||
this.state = TokenParserState.COMMA;
|
||||
} else if (this.mode === TokenParserMode.ARRAY) {
|
||||
(this.value as JsonArray).push(value);
|
||||
this.state = TokenParserState.COMMA;
|
||||
}
|
||||
|
||||
this.emit(value, this.shouldEmit());
|
||||
return;
|
||||
}
|
||||
|
||||
if (token === LEFT_BRACE) {
|
||||
this.push();
|
||||
if (this.mode === TokenParserMode.OBJECT) {
|
||||
this.value = (this.value as JsonObject)[this.key as string] = {};
|
||||
} else if (this.mode === TokenParserMode.ARRAY) {
|
||||
const val = {};
|
||||
(this.value as JsonArray).push(val);
|
||||
this.value = val;
|
||||
} else {
|
||||
this.value = {};
|
||||
}
|
||||
this.mode = TokenParserMode.OBJECT;
|
||||
this.state = TokenParserState.KEY;
|
||||
this.key = undefined;
|
||||
return;
|
||||
}
|
||||
|
||||
if (token === LEFT_BRACKET) {
|
||||
this.push();
|
||||
if (this.mode === TokenParserMode.OBJECT) {
|
||||
this.value = (this.value as JsonObject)[this.key as string] = [];
|
||||
} else if (this.mode === TokenParserMode.ARRAY) {
|
||||
const val: JsonArray = [];
|
||||
(this.value as JsonArray).push(val);
|
||||
this.value = val;
|
||||
} else {
|
||||
this.value = [];
|
||||
}
|
||||
this.mode = TokenParserMode.ARRAY;
|
||||
this.state = TokenParserState.VALUE;
|
||||
this.key = 0;
|
||||
return;
|
||||
}
|
||||
|
||||
if (
|
||||
this.mode === TokenParserMode.ARRAY &&
|
||||
token === RIGHT_BRACKET &&
|
||||
(this.value as JsonArray).length === 0
|
||||
) {
|
||||
this.pop();
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (this.state === TokenParserState.KEY) {
|
||||
if (token === STRING) {
|
||||
this.key = value as string;
|
||||
this.state = TokenParserState.COLON;
|
||||
return;
|
||||
}
|
||||
|
||||
if (
|
||||
token === RIGHT_BRACE &&
|
||||
Object.keys(this.value as JsonObject).length === 0
|
||||
) {
|
||||
this.pop();
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (this.state === TokenParserState.COLON) {
|
||||
if (token === COLON) {
|
||||
this.state = TokenParserState.VALUE;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (this.state === TokenParserState.COMMA) {
|
||||
if (token === COMMA) {
|
||||
if (this.mode === TokenParserMode.ARRAY) {
|
||||
this.state = TokenParserState.VALUE;
|
||||
(this.key as number) += 1;
|
||||
return;
|
||||
}
|
||||
|
||||
/* istanbul ignore else */
|
||||
if (this.mode === TokenParserMode.OBJECT) {
|
||||
this.state = TokenParserState.KEY;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
(token === RIGHT_BRACE && this.mode === TokenParserMode.OBJECT) ||
|
||||
(token === RIGHT_BRACKET && this.mode === TokenParserMode.ARRAY)
|
||||
) {
|
||||
this.pop();
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (this.state === TokenParserState.SEPARATOR) {
|
||||
if (token === SEPARATOR && value === this.separator) {
|
||||
this.state = TokenParserState.VALUE;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
this.error(
|
||||
new TokenParserError(
|
||||
`Unexpected ${TokenType[token]} (${JSON.stringify(value)}) in state ${
|
||||
TokenParserState[this.state]
|
||||
}`
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
public error(err: Error): void {
|
||||
if (this.state !== TokenParserState.ENDED) {
|
||||
this.state = TokenParserState.ERROR;
|
||||
}
|
||||
|
||||
this.onError(err);
|
||||
}
|
||||
|
||||
public end(): void {
|
||||
if (
|
||||
(this.state !== TokenParserState.VALUE &&
|
||||
this.state !== TokenParserState.SEPARATOR) ||
|
||||
this.stack.length > 0
|
||||
) {
|
||||
this.error(
|
||||
new Error(
|
||||
`Parser ended in mid-parsing (state: ${
|
||||
TokenParserState[this.state]
|
||||
}). Either not all the data was received or the data was invalid.`
|
||||
)
|
||||
);
|
||||
} else {
|
||||
this.state = TokenParserState.ENDED;
|
||||
this.onEnd();
|
||||
}
|
||||
}
|
||||
|
||||
public onValue(
|
||||
value: JsonPrimitive | JsonStruct,
|
||||
key: number,
|
||||
parent: JsonArray,
|
||||
stack: StackElement[]
|
||||
): void;
|
||||
public onValue(
|
||||
value: JsonPrimitive | JsonStruct,
|
||||
key: string,
|
||||
parent: JsonObject,
|
||||
stack: StackElement[]
|
||||
): void;
|
||||
public onValue(
|
||||
value: JsonPrimitive | JsonStruct,
|
||||
key: undefined,
|
||||
parent: undefined,
|
||||
stack: []
|
||||
): void;
|
||||
public onValue(
|
||||
/* eslint-disable @typescript-eslint/no-unused-vars */
|
||||
value: JsonPrimitive | JsonStruct,
|
||||
key: JsonKey | undefined,
|
||||
parent: JsonStruct | undefined,
|
||||
stack: StackElement[]
|
||||
/* eslint-enable @typescript-eslint/no-unused-vars */
|
||||
): void {
|
||||
// Override me
|
||||
throw new TokenParserError(
|
||||
'Can\'t emit data before the "onValue" callback has been set up.'
|
||||
);
|
||||
}
|
||||
|
||||
public onError(err: Error): void {
|
||||
// Override me
|
||||
throw err;
|
||||
}
|
||||
|
||||
public onEnd(): void {
|
||||
// Override me
|
||||
}
|
||||
}
75  node_modules/@streamparser/json/src/utils/bufferedString.ts  (generated, vendored, new file)
@@ -0,0 +1,75 @@
export interface StringBuilder {
  byteLength: number;
  appendChar: (char: number) => void;
  appendBuf: (buf: Uint8Array, start?: number, end?: number) => void;
  reset: () => void;
  toString: () => string;
}

export class NonBufferedString implements StringBuilder {
  private decoder = new TextDecoder("utf-8");
  private string = "";
  public byteLength = 0;

  public appendChar(char: number): void {
    this.string += String.fromCharCode(char);
    this.byteLength += 1;
  }

  public appendBuf(buf: Uint8Array, start = 0, end: number = buf.length): void {
    this.string += this.decoder.decode(buf.subarray(start, end));
    this.byteLength += end - start;
  }

  public reset(): void {
    this.string = "";
    this.byteLength = 0;
  }

  public toString(): string {
    return this.string;
  }
}

export class BufferedString implements StringBuilder {
  private decoder = new TextDecoder("utf-8");
  private buffer: Uint8Array;
  private bufferOffset = 0;
  private string = "";
  public byteLength = 0;

  public constructor(bufferSize: number) {
    this.buffer = new Uint8Array(bufferSize);
  }

  public appendChar(char: number): void {
    if (this.bufferOffset >= this.buffer.length) this.flushStringBuffer();
    this.buffer[this.bufferOffset++] = char;
    this.byteLength += 1;
  }

  public appendBuf(buf: Uint8Array, start = 0, end: number = buf.length): void {
    const size = end - start;
    if (this.bufferOffset + size > this.buffer.length) this.flushStringBuffer();
    this.buffer.set(buf.subarray(start, end), this.bufferOffset);
    this.bufferOffset += size;
    this.byteLength += size;
  }

  private flushStringBuffer(): void {
    this.string += this.decoder.decode(
      this.buffer.subarray(0, this.bufferOffset)
    );
    this.bufferOffset = 0;
  }

  public reset(): void {
    this.string = "";
    this.bufferOffset = 0;
    this.byteLength = 0;
  }
  public toString(): string {
    this.flushStringBuffer();
    return this.string;
  }
}
14  node_modules/@streamparser/json/src/utils/constants.ts  (generated, vendored, new file)
@@ -0,0 +1,14 @@
export enum TokenType {
  LEFT_BRACE = 0x1,
  RIGHT_BRACE = 0x2,
  LEFT_BRACKET = 0x3,
  RIGHT_BRACKET = 0x4,
  COLON = 0x5,
  COMMA = 0x6,
  TRUE = 0x7,
  FALSE = 0x8,
  NULL = 0x9,
  STRING = 0xa,
  NUMBER = 0xb,
  SEPARATOR = 0xc,
}
5  node_modules/@streamparser/json/src/utils/types.ts  (generated, vendored, new file)
@@ -0,0 +1,5 @@
export type JsonPrimitive = string | number | boolean | null;
export type JsonKey = string | number | undefined;
export type JsonObject = { [key: string]: JsonPrimitive | JsonStruct };
export type JsonArray = (JsonPrimitive | JsonStruct)[];
export type JsonStruct = JsonObject | JsonArray;
113  node_modules/@streamparser/json/src/utils/utf-8.ts  (generated, vendored, new file)
@@ -0,0 +1,113 @@
export enum charset {
|
||||
BACKSPACE = 0x8, // "\b"
|
||||
FORM_FEED = 0xc, // "\f"
|
||||
NEWLINE = 0xa, // "\n"
|
||||
CARRIAGE_RETURN = 0xd, // "\r"
|
||||
TAB = 0x9, // "\t"
|
||||
SPACE = 0x20, //
|
||||
EXCLAMATION_MARK = 0x21, // !
|
||||
QUOTATION_MARK = 0x22, // "
|
||||
NUMBER_SIGN = 0x23, // #
|
||||
DOLLAR_SIGN = 0x24, // $
|
||||
PERCENT_SIGN = 0x25, // %
|
||||
AMPERSAND = 0x26, // &
|
||||
APOSTROPHE = 0x27, // '
|
||||
LEFT_PARENTHESIS = 0x28, // (
|
||||
RIGHT_PARENTHESIS = 0x29, // )
|
||||
ASTERISK = 0x2a, // *
|
||||
PLUS_SIGN = 0x2b, // +
|
||||
COMMA = 0x2c, // ,
|
||||
HYPHEN_MINUS = 0x2d, // -
|
||||
FULL_STOP = 0x2e, // .
|
||||
SOLIDUS = 0x2f, // /
|
||||
DIGIT_ZERO = 0x30, // 0
|
||||
DIGIT_ONE = 0x31, // 1
|
||||
DIGIT_TWO = 0x32, // 2
|
||||
DIGIT_THREE = 0x33, // 3
|
||||
DIGIT_FOUR = 0x34, // 4
|
||||
DIGIT_FIVE = 0x35, // 5
|
||||
DIGIT_SIX = 0x36, // 6
|
||||
DIGIT_SEVEN = 0x37, // 7
|
||||
DIGIT_EIGHT = 0x38, // 8
|
||||
DIGIT_NINE = 0x39, // 9
|
||||
COLON = 0x3a, // :
|
||||
SEMICOLON = 0x3b, // ;
|
||||
LESS_THAN_SIGN = 0x3c, // <
|
||||
EQUALS_SIGN = 0x3d, // =
|
||||
GREATER_THAN_SIGN = 0x3e, // >
|
||||
QUESTION_MARK = 0x3f, // ?
|
||||
COMMERCIAL_AT = 0x40, // @
|
||||
LATIN_CAPITAL_LETTER_A = 0x41, // A
|
||||
LATIN_CAPITAL_LETTER_B = 0x42, // B
|
||||
LATIN_CAPITAL_LETTER_C = 0x43, // C
|
||||
LATIN_CAPITAL_LETTER_D = 0x44, // D
|
||||
LATIN_CAPITAL_LETTER_E = 0x45, // E
|
||||
LATIN_CAPITAL_LETTER_F = 0x46, // F
|
||||
LATIN_CAPITAL_LETTER_G = 0x47, // G
|
||||
LATIN_CAPITAL_LETTER_H = 0x48, // H
|
||||
LATIN_CAPITAL_LETTER_I = 0x49, // I
|
||||
LATIN_CAPITAL_LETTER_J = 0x4a, // J
|
||||
LATIN_CAPITAL_LETTER_K = 0x4b, // K
|
||||
LATIN_CAPITAL_LETTER_L = 0x4c, // L
|
||||
LATIN_CAPITAL_LETTER_M = 0x4d, // M
|
||||
LATIN_CAPITAL_LETTER_N = 0x4e, // N
|
||||
LATIN_CAPITAL_LETTER_O = 0x4f, // O
|
||||
LATIN_CAPITAL_LETTER_P = 0x50, // P
|
||||
LATIN_CAPITAL_LETTER_Q = 0x51, // Q
|
||||
LATIN_CAPITAL_LETTER_R = 0x52, // R
|
||||
LATIN_CAPITAL_LETTER_S = 0x53, // S
|
||||
LATIN_CAPITAL_LETTER_T = 0x54, // T
|
||||
LATIN_CAPITAL_LETTER_U = 0x55, // U
|
||||
LATIN_CAPITAL_LETTER_V = 0x56, // V
|
||||
LATIN_CAPITAL_LETTER_W = 0x57, // W
|
||||
LATIN_CAPITAL_LETTER_X = 0x58, // X
|
||||
LATIN_CAPITAL_LETTER_Y = 0x59, // Y
|
||||
LATIN_CAPITAL_LETTER_Z = 0x5a, // Z
|
||||
LEFT_SQUARE_BRACKET = 0x5b, // [
|
||||
REVERSE_SOLIDUS = 0x5c, // \
|
||||
RIGHT_SQUARE_BRACKET = 0x5d, // ]
|
||||
CIRCUMFLEX_ACCENT = 0x5e, // ^
|
||||
LOW_LINE = 0x5f, // _
|
||||
GRAVE_ACCENT = 0x60, // `
|
||||
LATIN_SMALL_LETTER_A = 0x61, // a
|
||||
LATIN_SMALL_LETTER_B = 0x62, // b
|
||||
LATIN_SMALL_LETTER_C = 0x63, // c
|
||||
LATIN_SMALL_LETTER_D = 0x64, // d
|
||||
LATIN_SMALL_LETTER_E = 0x65, // e
|
||||
LATIN_SMALL_LETTER_F = 0x66, // f
|
||||
LATIN_SMALL_LETTER_G = 0x67, // g
|
||||
LATIN_SMALL_LETTER_H = 0x68, // h
|
||||
LATIN_SMALL_LETTER_I = 0x69, // i
|
||||
LATIN_SMALL_LETTER_J = 0x6a, // j
|
||||
LATIN_SMALL_LETTER_K = 0x6b, // k
|
||||
LATIN_SMALL_LETTER_L = 0x6c, // l
|
||||
LATIN_SMALL_LETTER_M = 0x6d, // m
|
||||
LATIN_SMALL_LETTER_N = 0x6e, // n
|
||||
LATIN_SMALL_LETTER_O = 0x6f, // o
|
||||
LATIN_SMALL_LETTER_P = 0x70, // p
|
||||
LATIN_SMALL_LETTER_Q = 0x71, // q
|
||||
LATIN_SMALL_LETTER_R = 0x72, // r
|
||||
LATIN_SMALL_LETTER_S = 0x73, // s
|
||||
LATIN_SMALL_LETTER_T = 0x74, // t
|
||||
LATIN_SMALL_LETTER_U = 0x75, // u
|
||||
LATIN_SMALL_LETTER_V = 0x76, // v
|
||||
LATIN_SMALL_LETTER_W = 0x77, // w
|
||||
LATIN_SMALL_LETTER_X = 0x78, // x
|
||||
LATIN_SMALL_LETTER_Y = 0x79, // y
|
||||
LATIN_SMALL_LETTER_Z = 0x7a, // z
|
||||
LEFT_CURLY_BRACKET = 0x7b, // {
|
||||
VERTICAL_LINE = 0x7c, // |
|
||||
RIGHT_CURLY_BRACKET = 0x7d, // }
|
||||
TILDE = 0x7e, // ~
|
||||
}
|
||||
|
||||
export const escapedSequences: { [key: number]: number } = {
|
||||
[charset.QUOTATION_MARK]: charset.QUOTATION_MARK,
|
||||
[charset.REVERSE_SOLIDUS]: charset.REVERSE_SOLIDUS,
|
||||
[charset.SOLIDUS]: charset.SOLIDUS,
|
||||
[charset.LATIN_SMALL_LETTER_B]: charset.BACKSPACE,
|
||||
[charset.LATIN_SMALL_LETTER_F]: charset.FORM_FEED,
|
||||
[charset.LATIN_SMALL_LETTER_N]: charset.NEWLINE,
|
||||
[charset.LATIN_SMALL_LETTER_R]: charset.CARRIAGE_RETURN,
|
||||
[charset.LATIN_SMALL_LETTER_T]: charset.TAB,
|
||||
};
89  node_modules/@streamparser/json/test/callbacks.ts  (generated, vendored, new file)
@@ -0,0 +1,89 @@
import tap from "tap";
|
||||
import JSONParser from "../src/jsonparser";
|
||||
import Tokenizer from "../src/tokenizer";
|
||||
import TokenParser from "../src/tokenparser";
|
||||
import { TokenType } from "../src/utils/constants";
|
||||
|
||||
const { test } = tap;
|
||||
|
||||
test("should error on missing onToken callback", (t) => {
|
||||
t.plan(1);
|
||||
|
||||
const p = new Tokenizer();
|
||||
|
||||
try {
|
||||
p.write('"test"');
|
||||
t.fail("Expected to fail");
|
||||
} catch (e) {
|
||||
t.pass();
|
||||
}
|
||||
});
|
||||
|
||||
test("should throw if missing onError callback", (t) => {
|
||||
t.plan(1);
|
||||
|
||||
const p = new TokenParser();
|
||||
p.end();
|
||||
|
||||
try {
|
||||
p.write(TokenType.TRUE, true);
|
||||
t.fail("Expected to fail");
|
||||
} catch (e) {
|
||||
t.pass();
|
||||
}
|
||||
});
|
||||
|
||||
test("should error on missing onValue callback", (t) => {
|
||||
t.plan(1);
|
||||
|
||||
const p = new JSONParser();
|
||||
|
||||
try {
|
||||
p.write('"test"');
|
||||
t.fail("Expected to fail");
|
||||
} catch (e) {
|
||||
t.pass();
|
||||
}
|
||||
});
|
||||
|
||||
test("should handle invalid input using the onError callback if set", (t) => {
|
||||
t.plan(1);
|
||||
|
||||
const p = new JSONParser();
|
||||
p.onValue = () => {
|
||||
/* Do nothing */
|
||||
};
|
||||
p.onError = (err) =>
|
||||
t.equal(
|
||||
err.message,
|
||||
"Unexpected type. The `write` function only accepts Arrays, TypedArrays and Strings."
|
||||
);
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
p.write(745674 as any);
|
||||
});
|
||||
|
||||
test("should handle errors using the onError callback if set", (t) => {
|
||||
t.plan(1);
|
||||
|
||||
const p = new JSONParser();
|
||||
p.onValue = () => {
|
||||
/* Do nothing */
|
||||
};
|
||||
p.onError = (err) =>
|
||||
t.equal(err.message, 'Unexpected "t" at position "2" in state ENDED');
|
||||
|
||||
p.write('""test""');
|
||||
});
|
||||
|
||||
test("should handle processing end using the onEnd callback if set", (t) => {
|
||||
t.plan(1);
|
||||
|
||||
const p = new JSONParser();
|
||||
p.onValue = () => {
|
||||
/* Do nothing */
|
||||
};
|
||||
p.onEnd = () => t.pass();
|
||||
|
||||
p.write('"test"');
|
||||
});
127  node_modules/@streamparser/json/test/end.ts  (generated, vendored, new file)
@@ -0,0 +1,127 @@
import tap from "tap";
|
||||
import JSONParser from "../src/jsonparser";
|
||||
|
||||
const { test } = tap;
|
||||
|
||||
test("should fail if writing after ending", (t) => {
|
||||
t.plan(2);
|
||||
|
||||
const p = new JSONParser({ separator: "" });
|
||||
p.onValue = () => {
|
||||
/* Do nothing */
|
||||
};
|
||||
|
||||
p.write('"test"');
|
||||
p.end();
|
||||
|
||||
t.ok(p.isEnded);
|
||||
try {
|
||||
p.write('"test"');
|
||||
t.fail("Expected to fail");
|
||||
} catch (e) {
|
||||
t.pass();
|
||||
}
|
||||
});
|
||||
|
||||
test("should auto-end after emiting one object", (t) => {
|
||||
const values = ["2 2", "2.33456{}", "{}{}{}"];
|
||||
|
||||
t.plan(values.length * 2);
|
||||
|
||||
values.forEach((str) => {
|
||||
const p = new JSONParser();
|
||||
p.onValue = () => {
|
||||
/* Do nothing */
|
||||
};
|
||||
|
||||
try {
|
||||
p.write(str);
|
||||
t.fail(`Expected to fail on value "${str}"`);
|
||||
} catch (e) {
|
||||
t.ok(p.isEnded);
|
||||
t.pass();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
test("should emit numbers if ending on a valid number", (t) => {
|
||||
const values = [
|
||||
"0",
|
||||
"2",
|
||||
"2.33456",
|
||||
"2.33456e+1",
|
||||
"-2",
|
||||
"-2.33456",
|
||||
"-2.33456e+1",
|
||||
];
|
||||
|
||||
const expected = values.map((str) => JSON.parse(str));
|
||||
|
||||
t.plan(expected.length * 2);
|
||||
|
||||
let i = 0;
|
||||
|
||||
values.forEach((str) => {
|
||||
const p = new JSONParser({ separator: "" });
|
||||
p.onValue = (value) => t.equal(value, expected[i++]);
|
||||
|
||||
p.write(str);
|
||||
p.end();
|
||||
|
||||
t.ok(p.isEnded);
|
||||
});
|
||||
});
|
||||
|
||||
test("should fail if ending in the middle of parsing", (t) => {
|
||||
const values = [
|
||||
"2.",
|
||||
"2.33456e",
|
||||
"2.33456e+",
|
||||
'"asdfasd',
|
||||
"tru",
|
||||
'"fa',
|
||||
'"nul',
|
||||
"{",
|
||||
"[",
|
||||
'{ "a":',
|
||||
'{ "a": { "b": 1, ',
|
||||
'{ "a": { "b": 1, "c": 2, "d": 3, "e": 4 }',
|
||||
];
|
||||
|
||||
t.plan(values.length);
|
||||
|
||||
values.forEach((str) => {
|
||||
const p = new JSONParser();
|
||||
p.onValue = () => {
|
||||
/* Do nothing */
|
||||
};
|
||||
|
||||
p.write(str);
|
||||
|
||||
try {
|
||||
p.end();
|
||||
t.fail(`Expected to fail on value "${str}"`);
|
||||
} catch (e) {
|
||||
t.pass();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
test("should not fail if ending waiting for a separator", (t) => {
|
||||
t.plan(1);
|
||||
|
||||
const separator = "\n";
|
||||
|
||||
const p = new JSONParser({ separator });
|
||||
p.onValue = () => {
|
||||
/* Do nothing */
|
||||
};
|
||||
|
||||
p.write("1");
|
||||
p.write(separator);
|
||||
p.write("2");
|
||||
|
||||
p.end();
|
||||
|
||||
t.ok(p.isEnded);
|
||||
});
89  node_modules/@streamparser/json/test/inputs.ts  (generated, vendored, new file)
@@ -0,0 +1,89 @@
import tap from "tap";
|
||||
import JSONParser from "../src/jsonparser";
|
||||
import { charset } from "../src/utils/utf-8";
|
||||
|
||||
const { test } = tap;
|
||||
|
||||
const { QUOTATION_MARK } = charset;
|
||||
|
||||
const quote = String.fromCharCode(QUOTATION_MARK);
|
||||
|
||||
test("write accept strings", (t) => {
|
||||
t.plan(1);
|
||||
const value = "test";
|
||||
|
||||
const p = new JSONParser();
|
||||
p.onValue = (v) => t.equal(v, value);
|
||||
|
||||
p.write(quote);
|
||||
p.write(value);
|
||||
p.write(quote);
|
||||
});
|
||||
|
||||
test("write accept Uint8Array", (t) => {
|
||||
t.plan(1);
|
||||
const value = "test";
|
||||
|
||||
const p = new JSONParser();
|
||||
p.onValue = (v) => t.equal(v, value);
|
||||
|
||||
p.write(quote);
|
||||
p.write(new Uint8Array([116, 101, 115, 116]));
|
||||
p.write(quote);
|
||||
});
|
||||
|
||||
test("write accept Uint16Array", (t) => {
|
||||
t.plan(1);
|
||||
|
||||
const value = "test";
|
||||
|
||||
const p = new JSONParser();
|
||||
p.onValue = (v) => t.equal(v, value);
|
||||
|
||||
p.write(quote);
|
||||
p.write(new Uint16Array([116, 101, 115, 116]));
|
||||
p.write(quote);
|
||||
});
|
||||
|
||||
test("write accept Uint32Array", (t) => {
|
||||
t.plan(1);
|
||||
|
||||
const value = "test";
|
||||
|
||||
const p = new JSONParser();
|
||||
p.onValue = (v) => t.equal(v, value);
|
||||
|
||||
p.write(quote);
|
||||
p.write(new Uint32Array([116, 101, 115, 116]));
|
||||
p.write(quote);
|
||||
});
|
||||
|
||||
test("write accept Array", (t) => {
|
||||
t.plan(1);
|
||||
|
||||
const value = "test";
|
||||
|
||||
const p = new JSONParser();
|
||||
p.onValue = (v) => t.equal(v, value);
|
||||
|
||||
p.write(quote);
|
||||
p.write([116, 101, 115, 116]);
|
||||
p.write(quote);
|
||||
});
|
||||
|
||||
test("write throw on invalid type", (t) => {
|
||||
t.plan(1);
|
||||
|
||||
const p = new JSONParser();
|
||||
p.onValue = () => {
|
||||
/* Do nothing */
|
||||
};
|
||||
|
||||
try {
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
p.write(745674 as any);
|
||||
t.fail("Expected to fail");
|
||||
} catch (e) {
|
||||
t.pass();
|
||||
}
|
||||
});
45  node_modules/@streamparser/json/test/keepStack.ts  (generated, vendored, new file)
@@ -0,0 +1,45 @@
import tap from "tap";
|
||||
import JSONParser from "../src/jsonparser";
|
||||
|
||||
const { test } = tap;
|
||||
|
||||
const testData = [
|
||||
{
|
||||
value: '{ "a": { "b": 1, "c": 2, "d": 3, "e": 4 } }',
|
||||
paths: ["$"],
|
||||
expected: 1,
|
||||
},
|
||||
{
|
||||
value: '{ "a": { "b": 1, "c": 2, "d": 3, "e": 4 } }',
|
||||
paths: ["$.a.*"],
|
||||
expected: 4,
|
||||
},
|
||||
{
|
||||
value: '{ "a": { "b": 1, "c": 2, "d": 3, "e": 4 } }',
|
||||
paths: ["$.a.e"],
|
||||
expected: 1,
|
||||
},
|
||||
{ value: '{ "a": { "b": [1,2,3,4,5,6] } }', paths: ["$.a.b.*"], expected: 6 },
|
||||
{
|
||||
value: '[{ "a": 1 }, { "a": 2 }, { "a": 3 }]',
|
||||
paths: ["$.*"],
|
||||
expected: 3,
|
||||
},
|
||||
];
|
||||
|
||||
testData.forEach(({ value, paths, expected }) => {
|
||||
test(`should keep parent empty if keepStack === false`, {}, (t) => {
|
||||
t.plan(expected);
|
||||
|
||||
const p = new JSONParser({ paths, keepStack: false });
|
||||
p.onValue = (value, key, parent) => {
|
||||
if (parent === undefined) {
|
||||
t.pass();
|
||||
return;
|
||||
}
|
||||
t.equal(Object.keys(parent).length, 0);
|
||||
};
|
||||
|
||||
p.write(value);
|
||||
});
|
||||
});
89  node_modules/@streamparser/json/test/offset.ts  (generated, vendored, new file)
@@ -0,0 +1,89 @@
import tap from "tap";
|
||||
import JSONParser from "../src/jsonparser";
|
||||
import { TokenType } from "../src/utils/constants";
|
||||
|
||||
const { test } = tap;
|
||||
|
||||
const {
|
||||
LEFT_BRACE,
|
||||
RIGHT_BRACE,
|
||||
LEFT_BRACKET,
|
||||
RIGHT_BRACKET,
|
||||
COLON,
|
||||
COMMA,
|
||||
TRUE,
|
||||
FALSE,
|
||||
NULL,
|
||||
STRING,
|
||||
NUMBER,
|
||||
} = TokenType;
|
||||
|
||||
const input = '{\n "string": "value",\n "number": 3,\n "object"';
|
||||
const input2 = ': {\n "key": "vд"\n },\n "array": [\n -1,\n 12\n ]\n ';
|
||||
const input3 = '"null": null, "true": true, "false": false, "frac": 3.14 }';
|
||||
|
||||
const offsets = [
|
||||
[0, LEFT_BRACE],
|
||||
[4, STRING],
|
||||
[12, COLON],
|
||||
[14, STRING],
|
||||
[21, COMMA],
|
||||
[25, STRING],
|
||||
[33, COLON],
|
||||
[35, NUMBER],
|
||||
[36, COMMA],
|
||||
[40, STRING],
|
||||
[48, COLON],
|
||||
[50, LEFT_BRACE],
|
||||
[54, STRING],
|
||||
[59, COLON],
|
||||
[61, STRING],
|
||||
[69, RIGHT_BRACE],
|
||||
[70, COMMA],
|
||||
[74, STRING],
|
||||
[81, COLON],
|
||||
[83, LEFT_BRACKET],
|
||||
[87, NUMBER],
|
||||
[89, COMMA],
|
||||
[93, NUMBER],
|
||||
[98, RIGHT_BRACKET],
|
||||
[102, STRING],
|
||||
[108, COLON],
|
||||
[110, NULL],
|
||||
[114, COMMA],
|
||||
[116, STRING],
|
||||
[122, COLON],
|
||||
[124, TRUE],
|
||||
[128, COMMA],
|
||||
[130, STRING],
|
||||
[137, COLON],
|
||||
[139, FALSE],
|
||||
[144, COMMA],
|
||||
[146, STRING],
|
||||
[152, COLON],
|
||||
[154, NUMBER],
|
||||
[159, RIGHT_BRACE],
|
||||
];
|
||||
|
||||
test("offset", (t) => {
|
||||
t.plan(offsets.length * 2 + 1);
|
||||
|
||||
let i = 0;
|
||||
|
||||
const p = new JSONParser();
|
||||
p.onToken = (token, value, offset) => {
|
||||
t.equal(offset, offsets[i][0]);
|
||||
t.equal(token, offsets[i][1]);
|
||||
i += 1;
|
||||
};
|
||||
p.onValue = () => {
|
||||
/* Do nothing */
|
||||
};
|
||||
p.onEnd = () => t.end();
|
||||
|
||||
p.write(input);
|
||||
p.write(input2);
|
||||
p.write(input3);
|
||||
|
||||
t.equal(i, offsets.length);
|
||||
});
151  node_modules/@streamparser/json/test/performance.ts  (generated, vendored, new file)
@@ -0,0 +1,151 @@
import tap from "tap";
|
||||
import JSONParser from "../src/jsonparser";
|
||||
import { charset } from "../src/utils/utf-8";
|
||||
|
||||
const { test } = tap;
|
||||
const {
|
||||
LATIN_SMALL_LETTER_A,
|
||||
QUOTATION_MARK,
|
||||
DIGIT_ONE,
|
||||
LEFT_SQUARE_BRACKET,
|
||||
RIGHT_SQUARE_BRACKET,
|
||||
LEFT_CURLY_BRACKET,
|
||||
RIGHT_CURLY_BRACKET,
|
||||
COMMA,
|
||||
COLON,
|
||||
} = charset;
|
||||
|
||||
const quote = String.fromCharCode(QUOTATION_MARK);
|
||||
|
||||
const oneKB = 1024;
|
||||
const oneMB = 1024 * oneKB;
|
||||
const twoHundredMB = 200 * oneMB;
|
||||
const kbsIn200MBs = twoHundredMB / oneKB;
|
||||
|
||||
test("buffered parsing", (t) => {
|
||||
t.plan(3);
|
||||
|
||||
t.test("can handle large strings without running out of memory", (t) => {
|
||||
t.plan(1);
|
||||
|
||||
const chunk = new Uint8Array(oneKB).fill(LATIN_SMALL_LETTER_A);
|
||||
|
||||
const p = new JSONParser({ stringBufferSize: 64 * 1024 });
|
||||
p.onToken = (type, value) =>
|
||||
t.equal(
|
||||
(value as string).length,
|
||||
twoHundredMB,
|
||||
"token should be size of input json"
|
||||
);
|
||||
|
||||
p.write(quote);
|
||||
for (let index = 0; index < kbsIn200MBs; index++) {
|
||||
p.write(chunk);
|
||||
}
|
||||
p.write(quote);
|
||||
});
|
||||
|
||||
t.test("can handle large numbers without running out of memory", (t) => {
|
||||
t.plan(1);
|
||||
|
||||
const chunk = new Uint8Array(oneKB).fill(DIGIT_ONE);
|
||||
|
||||
const p = new JSONParser({ numberBufferSize: 64 * 1024 });
|
||||
p.onToken = (type, value) =>
|
||||
t.equal(value, 1.1111111111111112, "token should be correct");
|
||||
|
||||
p.write("1.");
|
||||
for (let index = 0; index < kbsIn200MBs; index++) {
|
||||
p.write(chunk);
|
||||
}
|
||||
p.end();
|
||||
});
|
||||
|
||||
t.test("can handle multi-byte unicode splits", (t) => {
|
||||
t.plan(1);
|
||||
|
||||
const p = new JSONParser({ stringBufferSize: 1 });
|
||||
p.onToken = (type, value) => t.equal(value, "𠜎");
|
||||
|
||||
p.write('"𠜎"');
|
||||
});
|
||||
});
|
||||
|
||||
test(`should keep memory stable if keepStack === false on array`, {}, (t) => {
|
||||
t.plan(201);
|
||||
|
||||
const chunk = new Uint8Array(oneKB).fill(LATIN_SMALL_LETTER_A);
|
||||
chunk[0] = QUOTATION_MARK;
|
||||
chunk[chunk.length - 1] = QUOTATION_MARK;
|
||||
const commaChunk = new Uint8Array([COMMA]);
|
||||
|
||||
const thirtyMBs = 20 * 1024 * 1024;
|
||||
let valuesLeft = kbsIn200MBs;
|
||||
|
||||
const p = new JSONParser({
|
||||
paths: ["$.*"],
|
||||
keepStack: false,
|
||||
stringBufferSize: oneKB,
|
||||
});
|
||||
p.onValue = () => {
|
||||
if (valuesLeft-- % oneKB !== 0) return;
|
||||
|
||||
const actualMemoryUsage = process.memoryUsage().heapUsed;
|
||||
t.ok(
|
||||
actualMemoryUsage - intialMemoryUsage < thirtyMBs,
|
||||
`${actualMemoryUsage} is significantly larger than ${intialMemoryUsage}`
|
||||
);
|
||||
};
|
||||
|
||||
const intialMemoryUsage = process.memoryUsage().heapUsed;
|
||||
|
||||
p.write(new Uint8Array([LEFT_SQUARE_BRACKET]));
|
||||
// decreasing so the number doesn't need to be reallocated
|
||||
for (let index = kbsIn200MBs; index > 0; index--) {
|
||||
p.write(chunk);
|
||||
p.write(commaChunk);
|
||||
}
|
||||
p.write(chunk);
|
||||
p.write(new Uint8Array([RIGHT_SQUARE_BRACKET]));
|
||||
});
|
||||
|
||||
test(`should keep memory stable if keepStack === false on object`, {}, (t) => {
|
||||
t.plan(201);
|
||||
|
||||
const chunk = new Uint8Array(oneKB).fill(LATIN_SMALL_LETTER_A);
|
||||
chunk[0] = QUOTATION_MARK;
|
||||
chunk[1] = LATIN_SMALL_LETTER_A;
|
||||
chunk[2] = QUOTATION_MARK;
|
||||
chunk[3] = COLON;
|
||||
chunk[4] = QUOTATION_MARK;
|
||||
chunk[chunk.length - 1] = QUOTATION_MARK;
|
||||
const commaChunk = new Uint8Array([COMMA]);
|
||||
|
||||
const thirtyMBs = 20 * 1024 * 1024;
|
||||
let valuesLeft = kbsIn200MBs;
|
||||
|
||||
const p = new JSONParser({
|
||||
paths: ["$.*"],
|
||||
keepStack: false,
|
||||
stringBufferSize: oneKB,
|
||||
});
|
||||
p.onValue = () => {
|
||||
if (valuesLeft-- % oneKB !== 0) return;
|
||||
|
||||
const actualMemoryUsage = process.memoryUsage().heapUsed;
|
||||
t.ok(
|
||||
actualMemoryUsage - intialMemoryUsage < thirtyMBs,
|
||||
`${actualMemoryUsage} is significantly larger than ${intialMemoryUsage}`
|
||||
);
|
||||
};
|
||||
|
||||
const intialMemoryUsage = process.memoryUsage().heapUsed;
|
||||
p.write(new Uint8Array([LEFT_CURLY_BRACKET]));
|
||||
// decreasing so the number doesn't need to be reallocated
|
||||
for (let index = kbsIn200MBs; index > 0; index--) {
|
||||
p.write(chunk);
|
||||
p.write(commaChunk);
|
||||
}
|
||||
p.write(chunk);
|
||||
p.write(new Uint8Array([RIGHT_CURLY_BRACKET]));
|
||||
});
87  node_modules/@streamparser/json/test/selectors.ts  (generated, vendored, new file)
@@ -0,0 +1,87 @@
import tap from "tap";
|
||||
import JSONParser from "../src/jsonparser";
|
||||
|
||||
const { test } = tap;
|
||||
|
||||
const testData = [
|
||||
{ value: "[0,1,-1]", paths: ["$"], expected: [[0, 1, -1]] },
|
||||
{ value: "[0,1,-1]", paths: ["$.*"], expected: [0, 1, -1] },
|
||||
{ value: "[0,1,-1]", paths: [undefined], expected: [0, 1, -1, [0, 1, -1]] },
|
||||
{ value: "[0,1,-1]", paths: ["$*"], expected: [0, 1, -1, [0, 1, -1]] },
|
||||
{
|
||||
value: "[0,1,[-1, 2]]",
|
||||
paths: ["$", "$.*"],
|
||||
expected: [0, 1, [-1, 2], [0, 1, [-1, 2]]],
|
||||
},
|
||||
{ value: "[0,1,-1]", paths: ["$.1"], expected: [1] },
|
||||
{ value: '{ "a": { "b": 1, "c": 2 } }', paths: ["$.a.*"], expected: [1, 2] },
|
||||
{ value: '{ "a": { "b": 1, "c": 2 } }', paths: ["$.a.c"], expected: [2] },
|
||||
{
|
||||
value: '{ "a": { "b": [1,2], "c": [3, 4] } }',
|
||||
paths: ["$.a.*.*"],
|
||||
expected: [1, 2, 3, 4],
|
||||
},
|
||||
{
|
||||
value: '{ "a": { "b": [1,2], "c": [3, 4] } }',
|
||||
paths: ["$.a.*.1"],
|
||||
expected: [2, 4],
|
||||
},
|
||||
{
|
||||
value: '{ "a": { "b": [1,2], "c": [3, 4] } }',
|
||||
paths: ["$.a.c.*"],
|
||||
expected: [3, 4],
|
||||
},
|
||||
{
|
||||
value: '{ "a": { "b": [1,2], "c": [3, 4] } }',
|
||||
paths: ["$.a.c.1"],
|
||||
expected: [4],
|
||||
},
|
||||
];
|
||||
|
||||
testData.forEach(({ value, paths, expected }) => {
|
||||
test(`Using selector ${paths} should emit only selected values`, {}, (t) => {
|
||||
t.plan(expected.length);
|
||||
|
||||
let i = 0;
|
||||
|
||||
const p = new JSONParser({ paths });
|
||||
p.onValue = (value) => {
|
||||
t.same(
|
||||
value,
|
||||
expected[i],
|
||||
`Error on expectation ${i} (${value} !== ${expected[i]})`
|
||||
);
|
||||
i += 1;
|
||||
};
|
||||
|
||||
p.write(value);
|
||||
});
|
||||
});
|
||||
|
||||
const invalidTestData = [
|
||||
{
|
||||
paths: ["*"],
|
||||
expectedError: 'Invalid selector "*". Should start with "$".',
|
||||
},
|
||||
{
|
||||
paths: [".*"],
|
||||
expectedError: 'Invalid selector ".*". Should start with "$".',
|
||||
},
|
||||
{
|
||||
paths: ["$..*"],
|
||||
expectedError: 'Invalid selector "$..*". ".." syntax not supported.',
|
||||
},
|
||||
];
|
||||
|
||||
invalidTestData.forEach(({ paths, expectedError }) => {
|
||||
test(`fail on invalid selector ${paths}`, {}, (t) => {
|
||||
t.plan(1);
|
||||
|
||||
try {
|
||||
new JSONParser({ paths });
|
||||
t.fail("Error expected on invalid selector");
|
||||
} catch (err) {
|
||||
t.equal(err.message, expectedError);
|
||||
}
|
||||
});
|
||||
});
|
||||
106
node_modules/@streamparser/json/test/separator.ts
generated
vendored
Normal file
@ -0,0 +1,106 @@
|
||||
import tap from "tap";
|
||||
import JSONParser from "../src/jsonparser";
|
||||
|
||||
const { test } = tap;
|
||||
|
||||
const testData = [
|
||||
{ value: "true", expected: [true] },
|
||||
{ value: "false", expected: [false] },
|
||||
{ value: "null", expected: [null] },
|
||||
{ value: '"string"', expected: ["string"] },
|
||||
{ value: "[1,2,3]", expected: [1, 2, 3, [1, 2, 3]] },
|
||||
{
|
||||
value: '{ "a": 0, "b": 1, "c": -1 }',
|
||||
expected: [0, 1, -1, { a: 0, b: 1, c: -1 }],
|
||||
},
|
||||
];
|
||||
|
||||
const expected = testData
|
||||
.map(({ expected }) => expected)
|
||||
.reduce((acc, val) => [...acc, ...val], []);
|
||||
|
||||
test("separator: empty string", (t) => {
|
||||
t.plan(expected.length);
|
||||
|
||||
let i = 0;
|
||||
|
||||
const p = new JSONParser({ separator: "" });
|
||||
p.onValue = (value) => {
|
||||
t.same(
|
||||
value,
|
||||
expected[i],
|
||||
`Error on expectation ${i} (${value} !== ${expected[i]})`
|
||||
);
|
||||
i += 1;
|
||||
};
|
||||
|
||||
testData.forEach(({ value }) => p.write(value));
|
||||
|
||||
p.end();
|
||||
});
|
||||
|
||||
test("separator: ND-JSON", (t) => {
|
||||
t.plan(expected.length);
|
||||
|
||||
const separator = "\n";
|
||||
let i = 0;
|
||||
|
||||
const p = new JSONParser({ separator });
|
||||
p.onValue = (value) => {
|
||||
t.same(
|
||||
value,
|
||||
expected[i],
|
||||
`Error on expectation ${i} (${value} !== ${expected[i]})`
|
||||
);
|
||||
i += 1;
|
||||
};
|
||||
|
||||
testData.forEach(({ value }) => {
|
||||
p.write(value);
|
||||
p.write(separator);
|
||||
});
|
||||
|
||||
p.end();
|
||||
});
|
||||
|
||||
const separators = ["\t\n", "abc", "SEPARATOR"];
|
||||
separators.forEach((separator) => {
|
||||
test("separator: multi-byte", (t) => {
|
||||
t.plan(expected.length);
|
||||
|
||||
let i = 0;
|
||||
|
||||
const p = new JSONParser({ separator });
|
||||
p.onValue = (value) => {
|
||||
t.same(
|
||||
value,
|
||||
expected[i],
|
||||
`Error on expectation ${i} (${value} !== ${expected[i]})`
|
||||
);
|
||||
i += 1;
|
||||
};
|
||||
|
||||
testData.forEach(({ value }) => {
|
||||
p.write(value);
|
||||
p.write(separator);
|
||||
});
|
||||
|
||||
p.end();
|
||||
});
|
||||
});
|
||||
|
||||
test(`separator: fail on invalid value`, {}, (t) => {
|
||||
t.plan(1);
|
||||
|
||||
const p = new JSONParser({ separator: "abc" });
|
||||
p.onValue = () => {
|
||||
/* Do nothing */
|
||||
};
|
||||
|
||||
try {
|
||||
p.write("abe");
|
||||
t.fail("Error expected on invalid selector");
|
||||
} catch (err) {
|
||||
t.equal(err.message, 'Unexpected "e" at position "2" in state SEPARATOR');
|
||||
}
|
||||
});
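These cases show that `separator` turns the parser into a multi-document reader: each complete JSON value followed by the separator is emitted independently, and an unexpected byte inside the separator raises an error. A short ND-JSON sketch, again assuming the package's default `JSONParser` export:

```js
import JSONParser from "@streamparser/json"; // assumed package entry point

// Parse newline-delimited JSON: each line is a complete document.
const parser = new JSONParser({ separator: "\n" });

parser.onValue = (value, key, parent, stack) => {
  if (stack.length === 0) {
    // stack is empty only for the root of each document
    console.log("document:", value);
  }
};

parser.write('{"id":1}\n{"id":2}\n');
parser.end();
```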
|
||||
108
node_modules/@streamparser/json/test/types/arrays.ts
generated
vendored
Normal file
@ -0,0 +1,108 @@
|
||||
import tap from "tap";
|
||||
import JSONParser from "../../src/jsonparser";
|
||||
|
||||
const { test } = tap;
|
||||
|
||||
const values = [
|
||||
"[]",
|
||||
"[0,1,-1]",
|
||||
"[1.0,1.1,-1.1,-1.0]",
|
||||
"[-1]",
|
||||
"[-0.1]",
|
||||
"[6.02e23, 6.02e+23, 6.02e-23, 0e23]",
|
||||
"[7161093205057351174]",
|
||||
];
|
||||
|
||||
const expected = [
|
||||
[[], []],
|
||||
[[0], 0],
|
||||
[[1], 1],
|
||||
[[2], -1],
|
||||
[[], [0, 1, -1]],
|
||||
[[0], 1],
|
||||
[[1], 1.1],
|
||||
[[2], -1.1],
|
||||
[[3], -1],
|
||||
[[], [1, 1.1, -1.1, -1]],
|
||||
[[0], -1],
|
||||
[[], [-1]],
|
||||
[[0], -0.1],
|
||||
[[], [-0.1]],
|
||||
[[0], 6.02e23],
|
||||
[[1], 6.02e23],
|
||||
[[2], 6.02e-23],
|
||||
[[3], 0e23],
|
||||
[[], [6.02e23, 6.02e23, 6.02e-23, 0e23]],
|
||||
[[0], "7161093205057351174"],
|
||||
[[], ["7161093205057351174"]],
|
||||
];
|
||||
|
||||
test("arrays", (t) => {
|
||||
t.plan(expected.length);
|
||||
|
||||
let i = 0;
|
||||
|
||||
values.forEach((str) => {
|
||||
const p = new JSONParser();
|
||||
p.onValue = (value, key, parent, stack) => {
|
||||
const keys = stack
|
||||
.slice(1)
|
||||
.map((item) => item.key)
|
||||
.concat(key !== undefined ? key : []);
|
||||
|
||||
t.same(
|
||||
[keys, value],
|
||||
expected[i],
|
||||
`Error on expectation ${i} (${[keys, value]} !== ${expected[i]})`
|
||||
);
|
||||
i += 1;
|
||||
};
|
||||
|
||||
p.write(str);
|
||||
});
|
||||
});
|
||||
|
||||
test("arrays chuncked", (t) => {
|
||||
t.plan(expected.length);
|
||||
|
||||
let i = 0;
|
||||
|
||||
values.forEach((str) => {
|
||||
const p = new JSONParser();
|
||||
p.onValue = (value, key, parent, stack) => {
|
||||
const keys = stack
|
||||
.slice(1)
|
||||
.map((item) => item.key)
|
||||
.concat(key !== undefined ? key : []);
|
||||
|
||||
t.same(
|
||||
[keys, value],
|
||||
expected[i],
|
||||
`Error on expectation ${i} (${[keys, value]} !== ${expected[i]})`
|
||||
);
|
||||
i += 1;
|
||||
};
|
||||
|
||||
str.split("").forEach((c) => p.write(c));
|
||||
});
|
||||
});
|
||||
|
||||
test("fail on invalid values", (t) => {
|
||||
const invalidValues = ["[,", "[1, eer]", "[1,]", "[1;", "[1}"];
|
||||
|
||||
t.plan(invalidValues.length);
|
||||
|
||||
invalidValues.forEach((str) => {
|
||||
const p = new JSONParser();
|
||||
p.onValue = () => {
|
||||
/* Do nothing */
|
||||
};
|
||||
|
||||
try {
|
||||
p.write(str);
|
||||
t.fail(`Expected to fail on value "${str}"`);
|
||||
} catch (e) {
|
||||
t.pass();
|
||||
}
|
||||
});
|
||||
});
|
||||
75
node_modules/@streamparser/json/test/types/booleans.ts
generated
vendored
Normal file
@ -0,0 +1,75 @@
|
||||
import tap from "tap";
|
||||
import JSONParser from "../../src/jsonparser";
|
||||
|
||||
const { test } = tap;
|
||||
|
||||
const values = ["true", "false"];
|
||||
const expected = values.map((str) => JSON.parse(str));
|
||||
|
||||
test("boolean", (t) => {
|
||||
t.plan(expected.length);
|
||||
|
||||
let i = 0;
|
||||
|
||||
values.forEach((str) => {
|
||||
const p = new JSONParser();
|
||||
p.onValue = (value) => {
|
||||
t.equal(
|
||||
value,
|
||||
expected[i],
|
||||
`Error on expectation ${i} (${value} !== ${expected[i]})`
|
||||
);
|
||||
i += 1;
|
||||
};
|
||||
|
||||
p.write(str);
|
||||
});
|
||||
});
|
||||
|
||||
test("boolean chuncked", (t) => {
|
||||
t.plan(expected.length);
|
||||
|
||||
let i = 0;
|
||||
|
||||
values.forEach((str) => {
|
||||
const p = new JSONParser();
|
||||
p.onValue = (value) => {
|
||||
t.equal(
|
||||
value,
|
||||
expected[i],
|
||||
`Error on expectation ${i} (${value} !== ${expected[i]})`
|
||||
);
|
||||
i += 1;
|
||||
};
|
||||
|
||||
str.split("").forEach((c) => p.write(c));
|
||||
});
|
||||
});
|
||||
|
||||
test("fail on invalid values", (t) => {
|
||||
const invalidValues = [
|
||||
"tRue",
|
||||
"trUe",
|
||||
"truE",
|
||||
"fAlse",
|
||||
"faLse",
|
||||
"falSe",
|
||||
"falsE",
|
||||
];
|
||||
|
||||
t.plan(invalidValues.length);
|
||||
|
||||
invalidValues.forEach((str) => {
|
||||
const p = new JSONParser();
|
||||
p.onValue = () => {
|
||||
/* Do nothing */
|
||||
};
|
||||
|
||||
try {
|
||||
p.write(str);
|
||||
t.fail(`Expected to fail on value "${str}"`);
|
||||
} catch (e) {
|
||||
t.pass();
|
||||
}
|
||||
});
|
||||
});
|
||||
67
node_modules/@streamparser/json/test/types/null.ts
generated
vendored
Normal file
@ -0,0 +1,67 @@
|
||||
import tap from "tap";
|
||||
import JSONParser from "../../src/jsonparser";
|
||||
|
||||
const { test } = tap;
|
||||
|
||||
const values = ["null"];
|
||||
const expected = values.map((str) => JSON.parse(str));
|
||||
|
||||
test("null", (t) => {
|
||||
t.plan(expected.length);
|
||||
|
||||
let i = 0;
|
||||
|
||||
values.forEach((str) => {
|
||||
const p = new JSONParser();
|
||||
p.onValue = (value) => {
|
||||
t.equal(
|
||||
value,
|
||||
expected[i],
|
||||
`Error on expectation ${i} (${value} !== ${expected[i]})`
|
||||
);
|
||||
i += 1;
|
||||
};
|
||||
|
||||
p.write(str);
|
||||
});
|
||||
});
|
||||
|
||||
test("null chuncked", (t) => {
|
||||
t.plan(expected.length);
|
||||
|
||||
let i = 0;
|
||||
|
||||
values.forEach((str) => {
|
||||
const p = new JSONParser();
|
||||
p.onValue = (value) => {
|
||||
t.equal(
|
||||
value,
|
||||
expected[i],
|
||||
`Error on expectation ${i} (${value} !== ${expected[i]})`
|
||||
);
|
||||
i += 1;
|
||||
};
|
||||
|
||||
str.split("").forEach((c) => p.write(c));
|
||||
});
|
||||
});
|
||||
|
||||
test("fail on invalid values", (t) => {
|
||||
const values = ["nUll", "nuLl", "nulL"];
|
||||
|
||||
t.plan(values.length);
|
||||
|
||||
values.forEach((str) => {
|
||||
const p = new JSONParser();
|
||||
p.onValue = () => {
|
||||
/* Do nothing */
|
||||
};
|
||||
|
||||
try {
|
||||
p.write(str);
|
||||
t.fail(`Expected to fail on value "${str}"`);
|
||||
} catch (e) {
|
||||
t.pass();
|
||||
}
|
||||
});
|
||||
});
|
||||
117
node_modules/@streamparser/json/test/types/numbers.ts
generated
vendored
Normal file
@ -0,0 +1,117 @@
|
||||
import tap from "tap";
|
||||
import JSONParser from "../../src/jsonparser";
|
||||
|
||||
const { test } = tap;
|
||||
|
||||
const values = [
|
||||
"0",
|
||||
"0e1",
|
||||
"0e+1",
|
||||
"0e-1",
|
||||
"0.123",
|
||||
"0.123e00",
|
||||
"0.123e+1",
|
||||
"0.123e-1",
|
||||
"0.123E00",
|
||||
"0.123E+1",
|
||||
"0.123E-1",
|
||||
"-0",
|
||||
"-0e1",
|
||||
"-0e+1",
|
||||
"-0e-1",
|
||||
"-0.123",
|
||||
"-0.123e00",
|
||||
"-0.123e+1",
|
||||
"-0.123e-1",
|
||||
"-0.123E00",
|
||||
"-0.123E+1",
|
||||
"-0.123E-1",
|
||||
"-123",
|
||||
"-123e1",
|
||||
"-123e+1",
|
||||
"-123e-1",
|
||||
"-123.123",
|
||||
"-123.123e00",
|
||||
"-123.123e+1",
|
||||
"-123.123e-1",
|
||||
"-123.123E00",
|
||||
"-123.123E+1",
|
||||
"-123.123E-1",
|
||||
"123",
|
||||
"123e1",
|
||||
"123e+1",
|
||||
"123e-1",
|
||||
"123.123",
|
||||
"123.123e00",
|
||||
"123.123e+1",
|
||||
"123.123e-1",
|
||||
"123.123E00",
|
||||
"123.123E+1",
|
||||
"123.123E-1",
|
||||
"7161093205057351174",
|
||||
"21e999",
|
||||
];
|
||||
const expected = values.map((str) => JSON.parse(str));
|
||||
|
||||
for (const numberBufferSize of [0, 64 * 1024]) {
|
||||
test("number", (t) => {
|
||||
t.plan(expected.length);
|
||||
|
||||
let i = 0;
|
||||
|
||||
values.forEach((str) => {
|
||||
const p = new JSONParser({ numberBufferSize });
|
||||
p.onValue = (value) => {
|
||||
t.equal(
|
||||
value,
|
||||
expected[i],
|
||||
`Error on expectation ${i} (${value} !== ${expected[i]})`
|
||||
);
|
||||
i += 1;
|
||||
};
|
||||
|
||||
p.write(str);
|
||||
p.end();
|
||||
});
|
||||
});
|
||||
|
||||
test("number chuncked", (t) => {
|
||||
t.plan(expected.length);
|
||||
let i = 0;
|
||||
|
||||
values.forEach((str) => {
|
||||
const p = new JSONParser({ numberBufferSize });
|
||||
p.onValue = (value) => {
|
||||
t.equal(
|
||||
value,
|
||||
expected[i],
|
||||
`Error on expectation ${i} (${value} !== ${expected[i]})`
|
||||
);
|
||||
i += 1;
|
||||
};
|
||||
|
||||
str.split("").forEach((c) => p.write(c));
|
||||
p.end();
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
test("fail on invalid values", (t) => {
|
||||
const values = ["-a", "-e", "1a", "1.a", "1.e", "1.-", "1.0ea", "1.0e1.2"];
|
||||
|
||||
t.plan(values.length);
|
||||
|
||||
values.forEach((str) => {
|
||||
const p = new JSONParser();
|
||||
p.onValue = () => {
|
||||
/* Do nothing */
|
||||
};
|
||||
|
||||
try {
|
||||
p.write(str);
|
||||
t.fail(`Expected to fail on value "${str}"`);
|
||||
} catch (e) {
|
||||
t.pass();
|
||||
}
|
||||
});
|
||||
});
|
||||
134
node_modules/@streamparser/json/test/types/objects.ts
generated
vendored
Normal file
@ -0,0 +1,134 @@
|
||||
import { readFileSync } from "fs";
|
||||
import tap from "tap";
|
||||
import JSONParser from "../../src/jsonparser";
|
||||
|
||||
const { test } = tap;
|
||||
|
||||
const values = [
|
||||
"{}",
|
||||
'{ "a": 0, "b": 1, "c": -1 }',
|
||||
'{ "a": 1.0, "b": 1.1, "c": -1.1, "d": -1.0 }',
|
||||
'{ "e": -1 }',
|
||||
'{ "f": -0.1 }',
|
||||
'{ "a": 6.02e23, "b": 6.02e+23, "c": 6.02e-23, "d": 0e23 }',
|
||||
'{ "a": 7161093205057351174 }',
|
||||
];
|
||||
|
||||
const expected = [
|
||||
[[], {}],
|
||||
[["a"], 0],
|
||||
[["b"], 1],
|
||||
[["c"], -1],
|
||||
[[], { a: 0, b: 1, c: -1 }],
|
||||
[["a"], 1],
|
||||
[["b"], 1.1],
|
||||
[["c"], -1.1],
|
||||
[["d"], -1],
|
||||
[[], { a: 1, b: 1.1, c: -1.1, d: -1 }],
|
||||
[["e"], -1],
|
||||
[[], { e: -1 }],
|
||||
[["f"], -0.1],
|
||||
[[], { f: -0.1 }],
|
||||
[["a"], 6.02e23],
|
||||
[["b"], 6.02e23],
|
||||
[["c"], 6.02e-23],
|
||||
[["d"], 0e23],
|
||||
[[], { a: 6.02e23, b: 6.02e23, c: 6.02e-23, d: 0e23 }],
|
||||
[["a"], "7161093205057351174"],
|
||||
[[], { a: "7161093205057351174" }],
|
||||
];
|
||||
|
||||
test("objects", (t) => {
|
||||
t.plan(expected.length);
|
||||
|
||||
let i = 0;
|
||||
|
||||
values.forEach((str) => {
|
||||
const p = new JSONParser();
|
||||
p.onValue = function (value) {
|
||||
const keys = this.stack
|
||||
.slice(1)
|
||||
.map((item) => item.key)
|
||||
.concat(this.key !== undefined ? this.key : []);
|
||||
|
||||
t.same(
|
||||
[keys, value],
|
||||
expected[i],
|
||||
`Error on expectation ${i} (${[keys, value]} !== ${expected[i]})`
|
||||
);
|
||||
i += 1;
|
||||
};
|
||||
|
||||
p.write(str);
|
||||
});
|
||||
});
|
||||
|
||||
test("objects chuncked", (t) => {
|
||||
t.plan(expected.length);
|
||||
|
||||
let i = 0;
|
||||
|
||||
values.forEach((str) => {
|
||||
const p = new JSONParser();
|
||||
p.onValue = function (value) {
|
||||
const keys = this.stack
|
||||
.slice(1)
|
||||
.map((item) => item.key)
|
||||
.concat(this.key !== undefined ? this.key : []);
|
||||
|
||||
t.same(
|
||||
[keys, value],
|
||||
expected[i],
|
||||
`Error on expectation ${i} (${[keys, value]} !== ${expected[i]})`
|
||||
);
|
||||
i += 1;
|
||||
};
|
||||
|
||||
str.split("").forEach((c) => p.write(c));
|
||||
});
|
||||
});
|
||||
|
||||
test("objects complex ", (t) => {
|
||||
t.plan(1);
|
||||
|
||||
const stringifiedJson = readFileSync(
|
||||
`${process.cwd()}/samplejson/basic.json`
|
||||
).toString();
|
||||
|
||||
const p = new JSONParser();
|
||||
p.onValue = (value, key, parent, stack) => {
|
||||
if (stack.length === 0) {
|
||||
t.same(JSON.parse(stringifiedJson), value);
|
||||
}
|
||||
};
|
||||
|
||||
p.write(stringifiedJson);
|
||||
});
|
||||
|
||||
test("fail on invalid values", (t) => {
|
||||
const invalidValues = [
|
||||
"{,",
|
||||
'{"test": eer[ }',
|
||||
"{ test: 1 }",
|
||||
'{ "test": 1 ;',
|
||||
'{ "test": 1 ]',
|
||||
'{ "test": 1, }',
|
||||
'{ "test", }',
|
||||
];
|
||||
|
||||
t.plan(invalidValues.length);
|
||||
|
||||
invalidValues.forEach((str) => {
|
||||
const p = new JSONParser();
|
||||
p.onValue = () => {
|
||||
/* Do nothing */
|
||||
};
|
||||
|
||||
try {
|
||||
p.write(str);
|
||||
t.fail(`Expected to fail on value "${str}"`);
|
||||
} catch (e) {
|
||||
t.pass();
|
||||
}
|
||||
});
|
||||
});
|
||||
233
node_modules/@streamparser/json/test/types/strings.ts
generated
vendored
Normal file
@ -0,0 +1,233 @@
|
||||
import tap from "tap";
|
||||
import JSONParser from "../../src/jsonparser";
|
||||
import { charset } from "../../src/utils/utf-8";
|
||||
|
||||
const { test } = tap;
|
||||
|
||||
const { QUOTATION_MARK } = charset;
|
||||
|
||||
const quote = String.fromCharCode(QUOTATION_MARK);
|
||||
|
||||
for (const stringBufferSize of [0, 64 * 1024]) {
|
||||
const values = [
|
||||
"Hello world!",
|
||||
'\\r\\n\\f\\t\\\\\\/\\"',
|
||||
"\\u039b\\u03ac\\u03bc\\u03b2\\u03b4\\u03b1",
|
||||
"☃",
|
||||
"├──",
|
||||
"snow: ☃!",
|
||||
"õ",
|
||||
];
|
||||
const expected = values.map((str) => JSON.parse(`"${str}"`));
|
||||
|
||||
test(`simple string with stringBufferSize = ${stringBufferSize}`, (t) => {
|
||||
t.plan(expected.length);
|
||||
|
||||
let i = 0;
|
||||
|
||||
values.forEach((str) => {
|
||||
const p = new JSONParser({ stringBufferSize });
|
||||
p.onValue = (value) => {
|
||||
t.equal(
|
||||
value,
|
||||
expected[i],
|
||||
`Error on expectation ${i} (${value} !== ${expected[i]})`
|
||||
);
|
||||
i += 1;
|
||||
};
|
||||
|
||||
p.write(quote);
|
||||
str.split("").forEach((c) => p.write(c));
|
||||
p.write(quote);
|
||||
});
|
||||
});
|
||||
|
||||
test("multibyte characters", (t) => {
|
||||
t.plan(5);
|
||||
|
||||
t.test("2 byte utf8 'De' character: д", (t) => {
|
||||
t.plan(1);
|
||||
|
||||
const p = new JSONParser({ stringBufferSize });
|
||||
p.onValue = (value) => t.equal(value, "д");
|
||||
|
||||
p.write(quote);
|
||||
p.write(new Uint8Array([0xd0, 0xb4]));
|
||||
p.write(quote);
|
||||
});
|
||||
|
||||
t.test("3 byte utf8 'Han' character: 我", (t) => {
|
||||
t.plan(1);
|
||||
|
||||
const p = new JSONParser({ stringBufferSize });
|
||||
p.onValue = (value) => t.equal(value, "我");
|
||||
|
||||
p.write(quote);
|
||||
p.write(new Uint8Array([0xe6, 0x88, 0x91]));
|
||||
p.write(quote);
|
||||
});
|
||||
|
||||
t.test("4 byte utf8 character (unicode scalar U+2070E): 𠜎", (t) => {
|
||||
t.plan(1);
|
||||
|
||||
const p = new JSONParser({ stringBufferSize });
|
||||
p.onValue = (value) => t.equal(value, "𠜎");
|
||||
|
||||
p.write(quote);
|
||||
p.write(new Uint8Array([0xf0, 0xa0, 0x9c, 0x8e]));
|
||||
p.write(quote);
|
||||
});
|
||||
|
||||
t.test("chunking", (t) => {
|
||||
t.plan(4);
|
||||
|
||||
t.test(
|
||||
"2 byte utf8 'De' character chunked inbetween 1st and 3nd byte: д",
|
||||
(t) => {
|
||||
t.plan(1);
|
||||
|
||||
const p = new JSONParser({ stringBufferSize });
|
||||
p.onValue = (value) => t.equal(value, "д");
|
||||
|
||||
p.write(quote);
|
||||
p.write(new Uint8Array([0xd0]));
|
||||
p.write(new Uint8Array([0xb4]));
|
||||
p.write(quote);
|
||||
}
|
||||
);
|
||||
|
||||
t.test(
|
||||
"3 byte utf8 'Han' character chunked inbetween 2nd and 3rd byte: 我",
|
||||
(t) => {
|
||||
t.plan(1);
|
||||
|
||||
const p = new JSONParser({ stringBufferSize });
|
||||
p.onValue = (value) => t.equal(value, "我");
|
||||
|
||||
p.write(quote);
|
||||
p.write(new Uint8Array([0xe6, 0x88]));
|
||||
p.write(new Uint8Array([0x91]));
|
||||
p.write(quote);
|
||||
}
|
||||
);
|
||||
|
||||
t.test(
|
||||
"4 byte utf8 character (unicode scalar U+2070E) chunked inbetween 2nd and 3rd byte: 𠜎",
|
||||
(t) => {
|
||||
t.plan(1);
|
||||
|
||||
const p = new JSONParser({ stringBufferSize });
|
||||
p.onValue = (value) => t.equal(value, "𠜎");
|
||||
|
||||
p.write(quote);
|
||||
p.write(new Uint8Array([0xf0, 0xa0]));
|
||||
p.write(new Uint8Array([0x9c, 0x8e]));
|
||||
p.write(quote);
|
||||
}
|
||||
);
|
||||
|
||||
t.test(
|
||||
"1-4 byte utf8 character string chunked inbetween random bytes: Aж文𠜱B",
|
||||
(t) => {
|
||||
t.plan(11);
|
||||
|
||||
const eclectic_buffer = new Uint8Array([
|
||||
0x41, // A
|
||||
0xd0,
|
||||
0xb6, // ж
|
||||
0xe6,
|
||||
0x96,
|
||||
0x87, // 文
|
||||
0xf0,
|
||||
0xa0,
|
||||
0x9c,
|
||||
0xb1, // 𠜱
|
||||
0x42,
|
||||
]); // B
|
||||
|
||||
for (let i = 0; i < 11; i++) {
|
||||
const p = new JSONParser({ stringBufferSize });
|
||||
p.onValue = (value) => t.equal(value, "Aж文𠜱B");
|
||||
|
||||
const first_buffer = eclectic_buffer.slice(0, i);
|
||||
const second_buffer = eclectic_buffer.slice(i);
|
||||
p.write(quote);
|
||||
p.write(first_buffer);
|
||||
p.write(second_buffer);
|
||||
p.write(quote);
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
t.test("surrogate", (t) => {
|
||||
t.plan(3);
|
||||
|
||||
t.test("parse surrogate pair", (t) => {
|
||||
t.plan(1);
|
||||
|
||||
const p = new JSONParser({ stringBufferSize });
|
||||
p.onValue = (value) => t.equal(value, "😋");
|
||||
|
||||
p.write('"\\uD83D\\uDE0B"');
|
||||
});
|
||||
|
||||
t.test("parse chunked surrogate pair", (t) => {
|
||||
t.plan(1);
|
||||
|
||||
const p = new JSONParser({ stringBufferSize });
|
||||
p.onValue = (value) => t.equal(value, "😋");
|
||||
|
||||
p.write(quote);
|
||||
p.write("\\uD83D");
|
||||
p.write("\\uDE0B");
|
||||
p.write(quote);
|
||||
});
|
||||
|
||||
t.test("not error on broken surrogate pair", (t) => {
|
||||
t.plan(1);
|
||||
|
||||
const p = new JSONParser({ stringBufferSize });
|
||||
p.onValue = (value) => t.equal(value, "<22>");
|
||||
|
||||
p.write(quote);
|
||||
p.write("\\uD83D\\uEFFF");
|
||||
p.write(quote);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
test("should flush the buffer if there is not space for incoming data", (t) => {
|
||||
t.plan(1);
|
||||
|
||||
const p = new JSONParser({ stringBufferSize: 5 });
|
||||
p.onValue = (value) => t.equal(value, "aaaa𠜎");
|
||||
|
||||
p.write(quote);
|
||||
p.write("aaaa");
|
||||
p.write("𠜎");
|
||||
p.write(quote);
|
||||
});
|
||||
|
||||
test("fail on invalid values", (t) => {
|
||||
const values = ["\n", "\\j", "\\ua", "\\u1*", "\\u12*", "\\u123*"];
|
||||
|
||||
t.plan(values.length);
|
||||
|
||||
values.forEach((str) => {
|
||||
const p = new JSONParser();
|
||||
p.onValue = () => {
|
||||
/* Do nothing */
|
||||
};
|
||||
|
||||
try {
|
||||
p.write(quote);
|
||||
p.write(str);
|
||||
p.write(quote);
|
||||
t.fail("Expected to fail");
|
||||
} catch (e) {
|
||||
t.pass();
|
||||
}
|
||||
});
|
||||
});
|
||||
18
node_modules/@streamparser/json/tsconfig.json
generated
vendored
Normal file
@ -0,0 +1,18 @@
|
||||
{
  "compilerOptions": {
    "target": "es6",
    "module": "ESNext",
    "lib": ["es2020", "dom"],
    "declaration": true,
    "outDir": "dist",
    "strict": true,
  },
  "include": [
    "src/**/*"
  ],
  // "exclude": [
  //   "node_modules",
  //   "dist",
  //   "test"
  // ]
}
|
||||
361
node_modules/commander/CHANGELOG.md
generated
vendored
Normal file
@ -0,0 +1,361 @@
|
||||
# Changelog
|
||||
|
||||
All notable changes to this project will be documented in this file.
|
||||
|
||||
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
|
||||
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). (Format adopted after v3.0.0.)
|
||||
|
||||
<!-- markdownlint-disable MD024 -->
|
||||
<!-- markdownlint-disable MD004 -->
|
||||
|
||||
## [6.2.1] (2020-12-13)
|
||||
|
||||
### Fixed
|
||||
|
||||
- some tests failed if directory path included a space ([1390])
|
||||
|
||||
## [6.2.0] (2020-10-25)
|
||||
|
||||
### Added
|
||||
|
||||
- added 'tsx' file extension for stand-alone executable subcommands ([#1368])
|
||||
- documented second parameter to `.description()` to describe command arguments ([#1353])
|
||||
- documentation of special cases with options taking varying numbers of option-arguments ([#1332])
|
||||
- documentation for terminology ([#1361])
|
||||
|
||||
### Fixed
|
||||
|
||||
- add missing TypeScript definition for `.addHelpCommand()` ([#1375])
|
||||
- removed blank line after "Arguments:" in help, to match "Options:" and "Commands:" ([#1360])
|
||||
|
||||
### Changed
|
||||
|
||||
- update dependencies
|
||||
|
||||
## [6.1.0] (2020-08-28)
|
||||
|
||||
### Added
|
||||
|
||||
- include URL to relevant section of README for error for potential conflict between Command properties and option values ([#1306])
|
||||
- `.combineFlagAndOptionalValue(false)` to ease upgrade path from older versions of Commander ([#1326])
|
||||
- allow disabling the built-in help option using `.helpOption(false)` ([#1325])
|
||||
- allow just some arguments in `argumentDescription` to `.description()` ([#1323])
|
||||
|
||||
### Changed
|
||||
|
||||
- tidy async test and remove lint override ([#1312])
|
||||
|
||||
### Fixed
|
||||
|
||||
- executable subcommand launching when script path not known ([#1322])
|
||||
|
||||
## [6.0.0] (2020-07-21)
|
||||
|
||||
### Added
|
||||
|
||||
- add support for variadic options ([#1250])
|
||||
- allow options to be added with just a short flag ([#1256])
|
||||
- *Breaking* the option property has same case as flag. e.g. flag `-n` accessed as `opts().n` (previously uppercase)
|
||||
- *Breaking* throw an error if there might be a clash between option name and a Command property, with advice on how to resolve ([#1275])
|
||||
|
||||
### Fixed
|
||||
|
||||
- Options which contain -no- in the middle of the option flag should not be treated as negatable. ([#1301])
|
||||
|
||||
## [6.0.0-0] (2020-06-20)
|
||||
|
||||
(Released in 6.0.0)
|
||||
|
||||
## [5.1.0] (2020-04-25)
|
||||
|
||||
### Added
|
||||
|
||||
- support for multiple command aliases, the first of which is shown in the auto-generated help ([#531], [#1236])
|
||||
- configuration support in `addCommand()` for `hidden` and `isDefault` ([#1232])
|
||||
|
||||
### Fixed
|
||||
|
||||
- omit masked help flags from the displayed help ([#645], [#1247])
|
||||
- remove old short help flag when change help flags using `helpOption` ([#1248])
|
||||
|
||||
### Changed
|
||||
|
||||
- remove use of `arguments` to improve auto-generated help in editors ([#1235])
|
||||
- rename `.command()` configuration `noHelp` to `hidden` (but not remove old support) ([#1232])
|
||||
- improvements to documentation
|
||||
- update dependencies
|
||||
- update tested versions of node
|
||||
- eliminate lint errors in TypeScript ([#1208])
|
||||
|
||||
## [5.0.0] (2020-03-14)
|
||||
|
||||
### Added
|
||||
|
||||
* support for nested commands with action-handlers ([#1] [#764] [#1149])
|
||||
* `.addCommand()` for adding a separately configured command ([#764] [#1149])
|
||||
* allow a non-executable to be set as the default command ([#742] [#1149])
|
||||
* implicit help command when there are subcommands (previously only if executables) ([#1149])
|
||||
* customise implicit help command with `.addHelpCommand()` ([#1149])
|
||||
* display error message for unknown subcommand, by default ([#432] [#1088] [#1149])
|
||||
* display help for missing subcommand, by default ([#1088] [#1149])
|
||||
* combined short options as single argument may include boolean flags and value flag and value (e.g. `-a -b -p 80` can be written as `-abp80`) ([#1145])
|
||||
* `.parseOption()` includes short flag and long flag expansions ([#1145])
|
||||
* `.helpInformation()` returns help text as a string, previously a private routine ([#1169])
|
||||
* `.parse()` implicitly uses `process.argv` if arguments not specified ([#1172])
|
||||
* optionally specify where `.parse()` arguments "from", if not following node conventions ([#512] [#1172])
|
||||
* suggest help option along with unknown command error ([#1179])
|
||||
* TypeScript definition for `commands` property of `Command` ([#1184])
|
||||
* export `program` property ([#1195])
|
||||
* `createCommand` factory method to simplify subclassing ([#1191])
|
||||
|
||||
### Fixed
|
||||
|
||||
* preserve argument order in subcommands ([#508] [#962] [#1138])
|
||||
* do not emit `command:*` for executable subcommands ([#809] [#1149])
|
||||
* action handler called whether or not there are non-option arguments ([#1062] [#1149])
|
||||
* combining option short flag and value in single argument now works for subcommands ([#1145])
|
||||
* only add implicit help command when it will not conflict with other uses of argument ([#1153] [#1149])
|
||||
* implicit help command works with command aliases ([#948] [#1149])
|
||||
* options are validated whether or not there is an action handler ([#1149])
|
||||
|
||||
### Changed
|
||||
|
||||
* *Breaking* `.args` contains command arguments with just recognised options removed ([#1032] [#1138])
|
||||
* *Breaking* display error if required argument for command is missing ([#995] [#1149])
|
||||
* tighten TypeScript definition of custom option processing function passed to `.option()` ([#1119])
|
||||
* *Breaking* `.allowUnknownOption()` ([#802] [#1138])
|
||||
* unknown options included in arguments passed to command action handler
|
||||
* unknown options included in `.args`
|
||||
* only recognised option short flags and long flags are expanded (e.g. `-ab` or `--foo=bar`) ([#1145])
|
||||
* *Breaking* `.parseOptions()` ([#1138])
|
||||
* `args` in returned result renamed `operands` and does not include anything after first unknown option
|
||||
* `unknown` in returned result has arguments after first unknown option including operands, not just options and values
|
||||
* *Breaking* `.on('command:*', callback)` and other command events passed (changed) results from `.parseOptions`, i.e. operands and unknown ([#1138])
|
||||
* refactor Option from prototype to class ([#1133])
|
||||
* refactor Command from prototype to class ([#1159])
|
||||
* changes to error handling ([#1165])
|
||||
* throw for author error, not just display message
|
||||
* preflight for variadic error
|
||||
* add tips to missing subcommand executable
|
||||
* TypeScript fluent return types changed to be more subclass friendly, return `this` rather than `Command` ([#1180])
|
||||
* `.parseAsync` returns `Promise<this>` to be consistent with `.parse()` ([#1180])
|
||||
* update dependencies
|
||||
|
||||
### Removed
|
||||
|
||||
* removed EventEmitter from TypeScript definition for Command, eliminating implicit peer dependency on `@types/node` ([#1146])
|
||||
* removed private function `normalize` (the functionality has been integrated into `parseOptions`) ([#1145])
|
||||
* `parseExpectedArgs` is now private ([#1149])
|
||||
|
||||
### Migration Tips
|
||||
|
||||
If you use `.on('command:*')` or more complicated tests to detect an unrecognised subcommand, you may be able to delete the code and rely on the default behaviour.
|
||||
|
||||
If you use `program.args` or more complicated tests to detect a missing subcommand, you may be able to delete the code and rely on the default behaviour.
|
||||
|
||||
If you use `.command('*')` to add a default command, you may be be able to switch to `isDefault:true` with a named command.
|
||||
|
||||
If you want to continue combining short options with optional values as though they were boolean flags, set `combineFlagAndOptionalValue(false)`
|
||||
to expand `-fb` to `-f -b` rather than `-f b`.
|
||||
|
||||
## [5.0.0-4] (2020-03-03)
|
||||
|
||||
(Released in 5.0.0)
|
||||
|
||||
## [5.0.0-3] (2020-02-20)
|
||||
|
||||
(Released in 5.0.0)
|
||||
|
||||
## [5.0.0-2] (2020-02-10)
|
||||
|
||||
(Released in 5.0.0)
|
||||
|
||||
## [5.0.0-1] (2020-02-08)
|
||||
|
||||
(Released in 5.0.0)
|
||||
|
||||
## [5.0.0-0] (2020-02-02)
|
||||
|
||||
(Released in 5.0.0)
|
||||
|
||||
## [4.1.1] (2020-02-02)
|
||||
|
||||
### Fixed
|
||||
|
||||
* TypeScript definition for `.action()` should include Promise for async ([#1157])
|
||||
|
||||
## [4.1.0] (2020-01-06)
|
||||
|
||||
### Added
|
||||
|
||||
* two routines to change how option values are handled, and eliminate name clashes with command properties ([#933] [#1102])
|
||||
* see storeOptionsAsProperties and passCommandToAction in README
|
||||
* `.parseAsync` to use instead of `.parse` if supply async action handlers ([#806] [#1118])
|
||||
|
||||
### Fixed
|
||||
|
||||
* Remove trailing blanks from wrapped help text ([#1096])
|
||||
|
||||
### Changed
|
||||
|
||||
* update dependencies
|
||||
* extend security coverage for Commander 2.x to 2020-02-03
|
||||
* improvements to README
|
||||
* improvements to TypeScript definition documentation
|
||||
* move old versions out of main CHANGELOG
|
||||
* removed explicit use of `ts-node` in tests
|
||||
|
||||
## [4.0.1] (2019-11-12)
|
||||
|
||||
### Fixed
|
||||
|
||||
* display help when requested, even if there are missing required options ([#1091])
|
||||
|
||||
## [4.0.0] (2019-11-02)
|
||||
|
||||
### Added
|
||||
|
||||
* automatically wrap and indent help descriptions for options and commands ([#1051])
|
||||
* `.exitOverride()` allows override of calls to `process.exit` for additional error handling and to keep program running ([#1040])
|
||||
* support for declaring required options with `.requiredOptions()` ([#1071])
|
||||
* GitHub Actions support ([#1027])
|
||||
* translation links in README
|
||||
|
||||
### Changed
|
||||
|
||||
* dev: switch tests from Sinon+Should to Jest with major rewrite of tests ([#1035])
|
||||
* call default subcommand even when there are unknown options ([#1047])
|
||||
* *Breaking* Commander is only officially supported on Node 8 and above, and requires Node 6 ([#1053])
|
||||
|
||||
### Fixed
|
||||
|
||||
* *Breaking* keep command object out of program.args when action handler called ([#1048])
|
||||
* also, action handler now passed array of unknown arguments
|
||||
* complain about unknown options when program argument supplied and action handler ([#1049])
|
||||
* this changes parameters to `command:*` event to include unknown arguments
|
||||
* removed deprecated `customFds` option from call to `child_process.spawn` ([#1052])
|
||||
* rework TypeScript declarations to bring all types into imported namespace ([#1081])
|
||||
|
||||
### Migration Tips
|
||||
|
||||
#### Testing for no arguments
|
||||
|
||||
If you were previously using code like:
|
||||
|
||||
```js
|
||||
if (!program.args.length) ...
|
||||
```
|
||||
|
||||
a partial replacement is:
|
||||
|
||||
```js
|
||||
if (program.rawArgs.length < 3) ...
|
||||
```
|
||||
|
||||
## [4.0.0-1] Prerelease (2019-10-08)
|
||||
|
||||
(Released in 4.0.0)
|
||||
|
||||
## [4.0.0-0] Prerelease (2019-10-01)
|
||||
|
||||
(Released in 4.0.0)
|
||||
|
||||
## Older versions
|
||||
|
||||
* [3.x](./changelogs/CHANGELOG-3.md)
|
||||
* [2.x](./changelogs/CHANGELOG-2.md)
|
||||
* [1.x](./changelogs/CHANGELOG-1.md)
|
||||
* [0.x](./changelogs/CHANGELOG-0.md)
|
||||
|
||||
[#1]: https://github.com/tj/commander.js/issues/1
|
||||
[#432]: https://github.com/tj/commander.js/issues/432
|
||||
[#508]: https://github.com/tj/commander.js/issues/508
|
||||
[#512]: https://github.com/tj/commander.js/issues/512
|
||||
[#531]: https://github.com/tj/commander.js/issues/531
|
||||
[#645]: https://github.com/tj/commander.js/issues/645
|
||||
[#742]: https://github.com/tj/commander.js/issues/742
|
||||
[#764]: https://github.com/tj/commander.js/issues/764
|
||||
[#802]: https://github.com/tj/commander.js/issues/802
|
||||
[#806]: https://github.com/tj/commander.js/issues/806
|
||||
[#809]: https://github.com/tj/commander.js/issues/809
|
||||
[#948]: https://github.com/tj/commander.js/issues/948
|
||||
[#962]: https://github.com/tj/commander.js/issues/962
|
||||
[#995]: https://github.com/tj/commander.js/issues/995
|
||||
[#1027]: https://github.com/tj/commander.js/pull/1027
|
||||
[#1032]: https://github.com/tj/commander.js/issues/1032
|
||||
[#1035]: https://github.com/tj/commander.js/pull/1035
|
||||
[#1040]: https://github.com/tj/commander.js/pull/1040
|
||||
[#1047]: https://github.com/tj/commander.js/pull/1047
|
||||
[#1048]: https://github.com/tj/commander.js/pull/1048
|
||||
[#1049]: https://github.com/tj/commander.js/pull/1049
|
||||
[#1051]: https://github.com/tj/commander.js/pull/1051
|
||||
[#1052]: https://github.com/tj/commander.js/pull/1052
|
||||
[#1053]: https://github.com/tj/commander.js/pull/1053
|
||||
[#1062]: https://github.com/tj/commander.js/pull/1062
|
||||
[#1071]: https://github.com/tj/commander.js/pull/1071
|
||||
[#1081]: https://github.com/tj/commander.js/pull/1081
|
||||
[#1088]: https://github.com/tj/commander.js/issues/1088
|
||||
[#1091]: https://github.com/tj/commander.js/pull/1091
|
||||
[#1096]: https://github.com/tj/commander.js/pull/1096
|
||||
[#1102]: https://github.com/tj/commander.js/pull/1102
|
||||
[#1118]: https://github.com/tj/commander.js/pull/1118
|
||||
[#1119]: https://github.com/tj/commander.js/pull/1119
|
||||
[#1133]: https://github.com/tj/commander.js/pull/1133
|
||||
[#1138]: https://github.com/tj/commander.js/pull/1138
|
||||
[#1145]: https://github.com/tj/commander.js/pull/1145
|
||||
[#1146]: https://github.com/tj/commander.js/pull/1146
|
||||
[#1149]: https://github.com/tj/commander.js/pull/1149
|
||||
[#1153]: https://github.com/tj/commander.js/issues/1153
|
||||
[#1157]: https://github.com/tj/commander.js/pull/1157
|
||||
[#1159]: https://github.com/tj/commander.js/pull/1159
|
||||
[#1165]: https://github.com/tj/commander.js/pull/1165
|
||||
[#1169]: https://github.com/tj/commander.js/pull/1169
|
||||
[#1172]: https://github.com/tj/commander.js/pull/1172
|
||||
[#1179]: https://github.com/tj/commander.js/pull/1179
|
||||
[#1180]: https://github.com/tj/commander.js/pull/1180
|
||||
[#1184]: https://github.com/tj/commander.js/pull/1184
|
||||
[#1191]: https://github.com/tj/commander.js/pull/1191
|
||||
[#1195]: https://github.com/tj/commander.js/pull/1195
|
||||
[#1208]: https://github.com/tj/commander.js/pull/1208
|
||||
[#1232]: https://github.com/tj/commander.js/pull/1232
|
||||
[#1235]: https://github.com/tj/commander.js/pull/1235
|
||||
[#1236]: https://github.com/tj/commander.js/pull/1236
|
||||
[#1247]: https://github.com/tj/commander.js/pull/1247
|
||||
[#1248]: https://github.com/tj/commander.js/pull/1248
|
||||
[#1250]: https://github.com/tj/commander.js/pull/1250
|
||||
[#1256]: https://github.com/tj/commander.js/pull/1256
|
||||
[#1275]: https://github.com/tj/commander.js/pull/1275
|
||||
[#1301]: https://github.com/tj/commander.js/issues/1301
|
||||
[#1306]: https://github.com/tj/commander.js/pull/1306
|
||||
[#1312]: https://github.com/tj/commander.js/pull/1312
|
||||
[#1322]: https://github.com/tj/commander.js/pull/1322
|
||||
[#1323]: https://github.com/tj/commander.js/pull/1323
|
||||
[#1325]: https://github.com/tj/commander.js/pull/1325
|
||||
[#1326]: https://github.com/tj/commander.js/pull/1326
|
||||
[#1332]: https://github.com/tj/commander.js/pull/1332
|
||||
[#1353]: https://github.com/tj/commander.js/pull/1353
|
||||
[#1360]: https://github.com/tj/commander.js/pull/1360
|
||||
[#1361]: https://github.com/tj/commander.js/pull/1361
|
||||
[#1368]: https://github.com/tj/commander.js/pull/1368
|
||||
[#1375]: https://github.com/tj/commander.js/pull/1375
|
||||
[#1390]: https://github.com/tj/commander.js/pull/1390
|
||||
|
||||
[Unreleased]: https://github.com/tj/commander.js/compare/master...develop
|
||||
[6.2.1]: https://github.com/tj/commander.js/compare/v6.2.0..v6.2.1
|
||||
[6.2.0]: https://github.com/tj/commander.js/compare/v6.1.0..v6.2.0
|
||||
[6.1.0]: https://github.com/tj/commander.js/compare/v6.0.0..v6.1.0
|
||||
[6.0.0]: https://github.com/tj/commander.js/compare/v5.1.0..v6.0.0
|
||||
[6.0.0-0]: https://github.com/tj/commander.js/compare/v5.1.0..v6.0.0-0
|
||||
[5.1.0]: https://github.com/tj/commander.js/compare/v5.0.0..v5.1.0
|
||||
[5.0.0]: https://github.com/tj/commander.js/compare/v4.1.1..v5.0.0
|
||||
[5.0.0-4]: https://github.com/tj/commander.js/compare/v5.0.0-3..v5.0.0-4
|
||||
[5.0.0-3]: https://github.com/tj/commander.js/compare/v5.0.0-2..v5.0.0-3
|
||||
[5.0.0-2]: https://github.com/tj/commander.js/compare/v5.0.0-1..v5.0.0-2
|
||||
[5.0.0-1]: https://github.com/tj/commander.js/compare/v5.0.0-0..v5.0.0-1
|
||||
[5.0.0-0]: https://github.com/tj/commander.js/compare/v4.1.1..v5.0.0-0
|
||||
[4.1.1]: https://github.com/tj/commander.js/compare/v4.1.0..v4.1.1
|
||||
[4.1.0]: https://github.com/tj/commander.js/compare/v4.0.1..v4.1.0
|
||||
[4.0.1]: https://github.com/tj/commander.js/compare/v4.0.0..v4.0.1
|
||||
[4.0.0]: https://github.com/tj/commander.js/compare/v3.0.2..v4.0.0
|
||||
[4.0.0-1]: https://github.com/tj/commander.js/compare/v4.0.0-0..v4.0.0-1
|
||||
[4.0.0-0]: https://github.com/tj/commander.js/compare/v3.0.2...v4.0.0-0
|
||||
22
node_modules/commander/LICENSE
generated
vendored
Normal file
@ -0,0 +1,22 @@
|
||||
(The MIT License)
|
||||
|
||||
Copyright (c) 2011 TJ Holowaychuk <tj@vision-media.ca>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
'Software'), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
791
node_modules/commander/Readme.md
generated
vendored
Normal file
@ -0,0 +1,791 @@
|
||||
# Commander.js
|
||||
|
||||
[](http://travis-ci.org/tj/commander.js)
|
||||
[](https://www.npmjs.org/package/commander)
|
||||
[](https://npmcharts.com/compare/commander?minimal=true)
|
||||
[](https://packagephobia.now.sh/result?p=commander)
|
||||
|
||||
The complete solution for [node.js](http://nodejs.org) command-line interfaces.
|
||||
|
||||
Read this in other languages: English | [简体中文](./Readme_zh-CN.md)
|
||||
|
||||
- [Commander.js](#commanderjs)
|
||||
- [Installation](#installation)
|
||||
- [Declaring _program_ variable](#declaring-program-variable)
|
||||
- [Options](#options)
|
||||
- [Common option types, boolean and value](#common-option-types-boolean-and-value)
|
||||
- [Default option value](#default-option-value)
|
||||
- [Other option types, negatable boolean and boolean|value](#other-option-types-negatable-boolean-and-booleanvalue)
|
||||
- [Custom option processing](#custom-option-processing)
|
||||
- [Required option](#required-option)
|
||||
- [Variadic option](#variadic-option)
|
||||
- [Version option](#version-option)
|
||||
- [Commands](#commands)
|
||||
- [Specify the argument syntax](#specify-the-argument-syntax)
|
||||
- [Action handler (sub)commands](#action-handler-subcommands)
|
||||
- [Stand-alone executable (sub)commands](#stand-alone-executable-subcommands)
|
||||
- [Automated help](#automated-help)
|
||||
- [Custom help](#custom-help)
|
||||
- [.usage and .name](#usage-and-name)
|
||||
- [.help(cb)](#helpcb)
|
||||
- [.outputHelp(cb)](#outputhelpcb)
|
||||
- [.helpInformation()](#helpinformation)
|
||||
- [.helpOption(flags, description)](#helpoptionflags-description)
|
||||
- [.addHelpCommand()](#addhelpcommand)
|
||||
- [Custom event listeners](#custom-event-listeners)
|
||||
- [Bits and pieces](#bits-and-pieces)
|
||||
- [.parse() and .parseAsync()](#parse-and-parseasync)
|
||||
- [Avoiding option name clashes](#avoiding-option-name-clashes)
|
||||
- [TypeScript](#typescript)
|
||||
- [createCommand()](#createcommand)
|
||||
- [Import into ECMAScript Module](#import-into-ecmascript-module)
|
||||
- [Node options such as `--harmony`](#node-options-such-as---harmony)
|
||||
- [Debugging stand-alone executable subcommands](#debugging-stand-alone-executable-subcommands)
|
||||
- [Override exit handling](#override-exit-handling)
|
||||
- [Examples](#examples)
|
||||
- [Support](#support)
|
||||
- [Commander for enterprise](#commander-for-enterprise)
|
||||
|
||||
For information about terms used in this document see: [terminology](./docs/terminology.md)
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
npm install commander
|
||||
```
|
||||
|
||||
## Declaring _program_ variable
|
||||
|
||||
Commander exports a global object which is convenient for quick programs.
|
||||
This is used in the examples in this README for brevity.
|
||||
|
||||
```js
|
||||
const { program } = require('commander');
|
||||
program.version('0.0.1');
|
||||
```
|
||||
|
||||
For larger programs which may use commander in multiple ways, including unit testing, it is better to create a local Command object to use.
|
||||
|
||||
```js
|
||||
const { Command } = require('commander');
|
||||
const program = new Command();
|
||||
program.version('0.0.1');
|
||||
```
|
||||
|
||||
## Options
|
||||
|
||||
Options are defined with the `.option()` method, also serving as documentation for the options. Each option can have a short flag (single character) and a long name, separated by a comma or space or vertical bar ('|').
|
||||
|
||||
The options can be accessed as properties on the Command object. Multi-word options such as "--template-engine" are camel-cased, becoming `program.templateEngine` etc. See also optional new behaviour to [avoid name clashes](#avoiding-option-name-clashes).
|
||||
|
||||
Multiple short flags may optionally be combined in a single argument following the dash: boolean flags, followed by a single option taking a value (possibly followed by the value).
|
||||
For example `-a -b -p 80` may be written as `-ab -p80` or even `-abp80`.
|
||||
|
||||
You can use `--` to indicate the end of the options, and any remaining arguments will be used without being interpreted.
|
||||
|
||||
Options on the command line are not positional, and can be specified before or after other arguments.
|
||||
|
||||
### Common option types, boolean and value
|
||||
|
||||
The two most used option types are a boolean option, and an option which takes its value
|
||||
from the following argument (declared with angle brackets like `--expect <value>`). Both are `undefined` unless specified on command line.
|
||||
|
||||
Example file: [options-common.js](./examples/options-common.js)
|
||||
|
||||
```js
|
||||
program
|
||||
.option('-d, --debug', 'output extra debugging')
|
||||
.option('-s, --small', 'small pizza size')
|
||||
.option('-p, --pizza-type <type>', 'flavour of pizza');
|
||||
|
||||
program.parse(process.argv);
|
||||
|
||||
if (program.debug) console.log(program.opts());
|
||||
console.log('pizza details:');
|
||||
if (program.small) console.log('- small pizza size');
|
||||
if (program.pizzaType) console.log(`- ${program.pizzaType}`);
|
||||
```
|
||||
|
||||
```bash
|
||||
$ pizza-options -d
|
||||
{ debug: true, small: undefined, pizzaType: undefined }
|
||||
pizza details:
|
||||
$ pizza-options -p
|
||||
error: option '-p, --pizza-type <type>' argument missing
|
||||
$ pizza-options -ds -p vegetarian
|
||||
{ debug: true, small: true, pizzaType: 'vegetarian' }
|
||||
pizza details:
|
||||
- small pizza size
|
||||
- vegetarian
|
||||
$ pizza-options --pizza-type=cheese
|
||||
pizza details:
|
||||
- cheese
|
||||
```
|
||||
|
||||
`program.parse(arguments)` processes the arguments, leaving any args not consumed by the program options in the `program.args` array.
|
||||
|
||||
### Default option value
|
||||
|
||||
You can specify a default value for an option which takes a value.
|
||||
|
||||
Example file: [options-defaults.js](./examples/options-defaults.js)
|
||||
|
||||
```js
|
||||
program
|
||||
.option('-c, --cheese <type>', 'add the specified type of cheese', 'blue');
|
||||
|
||||
program.parse(process.argv);
|
||||
|
||||
console.log(`cheese: ${program.cheese}`);
|
||||
```
|
||||
|
||||
```bash
|
||||
$ pizza-options
|
||||
cheese: blue
|
||||
$ pizza-options --cheese stilton
|
||||
cheese: stilton
|
||||
```
|
||||
|
||||
### Other option types, negatable boolean and boolean|value
|
||||
|
||||
You can define a boolean option long name with a leading `no-` to set the option value to false when used.
|
||||
Defined alone this also makes the option true by default.
|
||||
|
||||
If you define `--foo` first, adding `--no-foo` does not change the default value from what it would
|
||||
otherwise be. You can specify a default boolean value for a boolean option and it can be overridden on command line.
|
||||
|
||||
Example file: [options-negatable.js](./examples/options-negatable.js)
|
||||
|
||||
```js
|
||||
program
|
||||
.option('--no-sauce', 'Remove sauce')
|
||||
.option('--cheese <flavour>', 'cheese flavour', 'mozzarella')
|
||||
.option('--no-cheese', 'plain with no cheese')
|
||||
.parse(process.argv);
|
||||
|
||||
const sauceStr = program.sauce ? 'sauce' : 'no sauce';
|
||||
const cheeseStr = (program.cheese === false) ? 'no cheese' : `${program.cheese} cheese`;
|
||||
console.log(`You ordered a pizza with ${sauceStr} and ${cheeseStr}`);
|
||||
```
|
||||
|
||||
```bash
|
||||
$ pizza-options
|
||||
You ordered a pizza with sauce and mozzarella cheese
|
||||
$ pizza-options --sauce
|
||||
error: unknown option '--sauce'
|
||||
$ pizza-options --cheese=blue
|
||||
You ordered a pizza with sauce and blue cheese
|
||||
$ pizza-options --no-sauce --no-cheese
|
||||
You ordered a pizza with no sauce and no cheese
|
||||
```
|
||||
|
||||
You can specify an option which may be used as a boolean option but may optionally take an option-argument
|
||||
(declared with square brackets like `--optional [value]`).
|
||||
|
||||
Example file: [options-boolean-or-value.js](./examples/options-boolean-or-value.js)
|
||||
|
||||
```js
|
||||
program
|
||||
.option('-c, --cheese [type]', 'Add cheese with optional type');
|
||||
|
||||
program.parse(process.argv);
|
||||
|
||||
if (program.cheese === undefined) console.log('no cheese');
|
||||
else if (program.cheese === true) console.log('add cheese');
|
||||
else console.log(`add cheese type ${program.cheese}`);
|
||||
```
|
||||
|
||||
```bash
|
||||
$ pizza-options
|
||||
no cheese
|
||||
$ pizza-options --cheese
|
||||
add cheese
|
||||
$ pizza-options --cheese mozzarella
|
||||
add cheese type mozzarella
|
||||
```
|
||||
|
||||
For information about possible ambiguous cases, see [options taking varying arguments](./docs/options-taking-varying-arguments.md).
|
||||
|
||||
### Custom option processing
|
||||
|
||||
You may specify a function to do custom processing of option-arguments. The callback function receives two parameters,
|
||||
the user specified option-argument and the previous value for the option. It returns the new value for the option.
|
||||
|
||||
This allows you to coerce the option-argument to the desired type, or accumulate values, or do entirely custom processing.
|
||||
|
||||
You can optionally specify the default/starting value for the option after the function parameter.
|
||||
|
||||
Example file: [options-custom-processing.js](./examples/options-custom-processing.js)
|
||||
|
||||
```js
|
||||
function myParseInt(value, dummyPrevious) {
|
||||
// parseInt takes a string and an optional radix
|
||||
return parseInt(value);
|
||||
}
|
||||
|
||||
function increaseVerbosity(dummyValue, previous) {
|
||||
return previous + 1;
|
||||
}
|
||||
|
||||
function collect(value, previous) {
|
||||
return previous.concat([value]);
|
||||
}
|
||||
|
||||
function commaSeparatedList(value, dummyPrevious) {
|
||||
return value.split(',');
|
||||
}
|
||||
|
||||
program
|
||||
.option('-f, --float <number>', 'float argument', parseFloat)
|
||||
.option('-i, --integer <number>', 'integer argument', myParseInt)
|
||||
.option('-v, --verbose', 'verbosity that can be increased', increaseVerbosity, 0)
|
||||
.option('-c, --collect <value>', 'repeatable value', collect, [])
|
||||
.option('-l, --list <items>', 'comma separated list', commaSeparatedList)
|
||||
;
|
||||
|
||||
program.parse(process.argv);
|
||||
|
||||
if (program.float !== undefined) console.log(`float: ${program.float}`);
|
||||
if (program.integer !== undefined) console.log(`integer: ${program.integer}`);
|
||||
if (program.verbose > 0) console.log(`verbosity: ${program.verbose}`);
|
||||
if (program.collect.length > 0) console.log(program.collect);
|
||||
if (program.list !== undefined) console.log(program.list);
|
||||
```
|
||||
|
||||
```bash
|
||||
$ custom -f 1e2
|
||||
float: 100
|
||||
$ custom --integer 2
|
||||
integer: 2
|
||||
$ custom -v -v -v
|
||||
verbosity: 3
|
||||
$ custom -c a -c b -c c
|
||||
[ 'a', 'b', 'c' ]
|
||||
$ custom --list x,y,z
|
||||
[ 'x', 'y', 'z' ]
|
||||
```
|
||||
|
||||
### Required option
|
||||
|
||||
You may specify a required (mandatory) option using `.requiredOption`. The option must have a value after parsing, usually specified on the command line, or perhaps from a default value (say from environment). The method is otherwise the same as `.option` in format, taking flags and description, and optional default value or custom processing.
|
||||
|
||||
Example file: [options-required.js](./examples/options-required.js)
|
||||
|
||||
```js
|
||||
program
|
||||
.requiredOption('-c, --cheese <type>', 'pizza must have cheese');
|
||||
|
||||
program.parse(process.argv);
|
||||
```
|
||||
|
||||
```bash
|
||||
$ pizza
|
||||
error: required option '-c, --cheese <type>' not specified
|
||||
```
|
||||
|
||||
### Variadic option
|
||||
|
||||
You may make an option variadic by appending `...` to the value placeholder when declaring the option. On the command line you
|
||||
can then specify multiple option-arguments, and the parsed option value will be an array. The extra arguments
|
||||
are read until the first argument starting with a dash. The special argument `--` stops option processing entirely. If a value
|
||||
is specified in the same argument as the option then no further values are read.
|
||||
|
||||
Example file: [options-variadic.js](./examples/options-variadic.js)
|
||||
|
||||
```js
|
||||
program
|
||||
.option('-n, --number <numbers...>', 'specify numbers')
|
||||
.option('-l, --letter [letters...]', 'specify letters');
|
||||
|
||||
program.parse();
|
||||
|
||||
console.log('Options: ', program.opts());
|
||||
console.log('Remaining arguments: ', program.args);
|
||||
```
|
||||
|
||||
```bash
|
||||
$ collect -n 1 2 3 --letter a b c
|
||||
Options: { number: [ '1', '2', '3' ], letter: [ 'a', 'b', 'c' ] }
|
||||
Remaining arguments: []
|
||||
$ collect --letter=A -n80 operand
|
||||
Options: { number: [ '80' ], letter: [ 'A' ] }
|
||||
Remaining arguments: [ 'operand' ]
|
||||
$ collect --letter -n 1 -n 2 3 -- operand
|
||||
Options: { number: [ '1', '2', '3' ], letter: true }
|
||||
Remaining arguments: [ 'operand' ]
|
||||
```
|
||||
|
||||
For information about possible ambiguous cases, see [options taking varying arguments](./docs/options-taking-varying-arguments.md).
|
||||
|
||||
### Version option
|
||||
|
||||
The optional `version` method adds handling for displaying the command version. The default option flags are `-V` and `--version`, and when present the command prints the version number and exits.
|
||||
|
||||
```js
|
||||
program.version('0.0.1');
|
||||
```
|
||||
|
||||
```bash
|
||||
$ ./examples/pizza -V
|
||||
0.0.1
|
||||
```
|
||||
|
||||
You may change the flags and description by passing additional parameters to the `version` method, using
|
||||
the same syntax for flags as the `option` method.
|
||||
|
||||
```js
|
||||
program.version('0.0.1', '-v, --vers', 'output the current version');
|
||||
```
|
||||
|
||||
## Commands
|
||||
|
||||
You can specify (sub)commands using `.command()` or `.addCommand()`. There are two ways these can be implemented: using an action handler attached to the command, or as a stand-alone executable file (described in more detail later). The subcommands may be nested ([example](./examples/nestedCommands.js)).
|
||||
|
||||
In the first parameter to `.command()` you specify the command name and any command-arguments. The arguments may be `<required>` or `[optional]`, and the last argument may also be `variadic...`.
|
||||
|
||||
You can use `.addCommand()` to add an already configured subcommand to the program.
|
||||
|
||||
For example:
|
||||
|
||||
```js
|
||||
// Command implemented using action handler (description is supplied separately to `.command`)
|
||||
// Returns new command for configuring.
|
||||
program
|
||||
.command('clone <source> [destination]')
|
||||
.description('clone a repository into a newly created directory')
|
||||
.action((source, destination) => {
|
||||
console.log('clone command called');
|
||||
});
|
||||
|
||||
// Command implemented using stand-alone executable file (description is second parameter to `.command`)
|
||||
// Returns `this` for adding more commands.
|
||||
program
|
||||
.command('start <service>', 'start named service')
|
||||
.command('stop [service]', 'stop named service, or all if no name supplied');
|
||||
|
||||
// Command prepared separately.
|
||||
// Returns `this` for adding more commands.
|
||||
program
|
||||
.addCommand(build.makeBuildCommand());
|
||||
```
|
||||
|
||||
Configuration options can be passed with the call to `.command()` and `.addCommand()`. Specifying `hidden: true` will
|
||||
remove the command from the generated help output. Specifying `isDefault: true` will run the subcommand if no other
|
||||
subcommand is specified ([example](./examples/defaultCommand.js)).
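A minimal sketch of these configuration options (the command names are made up for illustration):

```js
// Present and usable, but omitted from the generated help output.
program
  .command('debug', { hidden: true })
  .action(() => console.log('debug called'));

// Runs when the user does not name a subcommand.
program
  .command('serve', { isDefault: true })
  .action(() => console.log('serve called'));

program.parse(process.argv);
```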
|
||||
|
||||
### Specify the argument syntax
|
||||
|
||||
You use `.arguments` to specify the expected command-arguments for the top-level command, and for subcommands they are usually
|
||||
included in the `.command` call. Angled brackets (e.g. `<required>`) indicate required command-arguments.
|
||||
Square brackets (e.g. `[optional]`) indicate optional command-arguments.
|
||||
You can optionally describe the arguments in the help by supplying a hash as the second parameter to `.description()`.
|
||||
|
||||
Example file: [env](./examples/env)
|
||||
|
||||
```js
|
||||
program
|
||||
.version('0.1.0')
|
||||
.arguments('<cmd> [env]')
|
||||
.description('test command', {
|
||||
cmd: 'command to run',
|
||||
env: 'environment to run test in'
|
||||
})
|
||||
.action(function (cmd, env) {
|
||||
console.log('command:', cmd);
|
||||
console.log('environment:', env || 'no environment given');
|
||||
});
|
||||
|
||||
program.parse(process.argv);
|
||||
```
|
||||
|
||||
The last argument of a command can be variadic, and only the last argument. To make an argument variadic you
|
||||
append `...` to the argument name. For example:
|
||||
|
||||
```js
|
||||
const { program } = require('commander');
|
||||
|
||||
program
|
||||
.version('0.1.0')
|
||||
.command('rmdir <dir> [otherDirs...]')
|
||||
.action(function (dir, otherDirs) {
|
||||
console.log('rmdir %s', dir);
|
||||
if (otherDirs) {
|
||||
otherDirs.forEach(function (oDir) {
|
||||
console.log('rmdir %s', oDir);
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
program.parse(process.argv);
|
||||
```
|
||||
|
||||
The variadic argument is passed to the action handler as an array.
|
||||
|
||||
### Action handler (sub)commands
|
||||
|
||||
You can add options to a command that uses an action handler.
|
||||
The action handler gets passed a parameter for each argument you declared, and one additional argument which is the
|
||||
command object itself. This command argument has the values for the command-specific options added as properties.
|
||||
|
||||
```js
|
||||
const { program } = require('commander');
|
||||
|
||||
program
|
||||
.command('rm <dir>')
|
||||
.option('-r, --recursive', 'Remove recursively')
|
||||
.action(function (dir, cmdObj) {
|
||||
console.log('remove ' + dir + (cmdObj.recursive ? ' recursively' : ''))
|
||||
})
|
||||
|
||||
program.parse(process.argv)
|
||||
```
|
||||
|
||||
You may supply an `async` action handler, in which case you call `.parseAsync` rather than `.parse`.
|
||||
|
||||
```js
|
||||
async function run() { /* code goes here */ }
|
||||
|
||||
async function main() {
|
||||
program
|
||||
.command('run')
|
||||
.action(run);
|
||||
await program.parseAsync(process.argv);
|
||||
}
|
||||
```
|
||||
|
||||
A command's options on the command line are validated when the command is used. Any unknown options will be reported as an error.
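If you want a particular command to tolerate unknown options instead, `.allowUnknownOption()` switches that check off for the command; a minimal sketch:

```js
// Unknown options passed to `run` are left among the parsed arguments
// rather than being reported as an error.
program
  .command('run')
  .allowUnknownOption()
  .action(() => {
    console.log('run called');
  });
```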
|
||||
|
||||
### Stand-alone executable (sub)commands
|
||||
|
||||
When `.command()` is invoked with a description argument, this tells Commander that you're going to use stand-alone executables for subcommands.
|
||||
Commander will search for executables in the directory of the entry script (like `./examples/pm`) with the name `program-subcommand`, like `pm-install` or `pm-search`.
|
||||
You can specify a custom name with the `executableFile` configuration option.
|
||||
|
||||
You handle the options for an executable (sub)command in the executable, and don't declare them at the top-level.
|
||||
|
||||
Example file: [pm](./examples/pm)
|
||||
|
||||
```js
|
||||
program
|
||||
.version('0.1.0')
|
||||
.command('install [name]', 'install one or more packages')
|
||||
.command('search [query]', 'search with optional query')
|
||||
.command('update', 'update installed packages', { executableFile: 'myUpdateSubCommand' })
|
||||
.command('list', 'list packages installed', { isDefault: true });
|
||||
|
||||
program.parse(process.argv);
|
||||
```
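As a sketch only (not one of the bundled examples), a hypothetical `pm-install` executable could declare and handle its own options like this:

```js
#!/usr/bin/env node
// pm-install: options for this subcommand live here, not in the parent program.
const { program } = require('commander');

program
  .option('-g, --global', 'install packages globally')
  .arguments('[name]')
  .action((name) => {
    const target = name || 'dependencies from package.json';
    const where = program.opts().global ? 'globally' : 'locally';
    console.log('install %s %s', target, where);
  });

program.parse(process.argv);
```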
|
||||
|
||||
If the program is designed to be installed globally, make sure the executables have proper modes, like `755`.
|
||||
|
||||
## Automated help
|
||||
|
||||
The help information is auto-generated based on the information commander already knows about your program. The default
|
||||
help option is `-h,--help`.
|
||||
|
||||
Example file: [pizza](./examples/pizza)
|
||||
|
||||
```bash
|
||||
$ node ./examples/pizza --help
|
||||
Usage: pizza [options]
|
||||
|
||||
An application for pizzas ordering
|
||||
|
||||
Options:
|
||||
-V, --version output the version number
|
||||
-p, --peppers Add peppers
|
||||
-c, --cheese <type> Add the specified type of cheese (default: "marble")
|
||||
-C, --no-cheese You do not want any cheese
|
||||
-h, --help display help for command
|
||||
```
|
||||
|
||||
A `help` command is added by default if your command has subcommands. It can be used alone, or with a subcommand name to show
|
||||
further help for the subcommand. These are effectively the same if the `shell` program has implicit help:
|
||||
|
||||
```bash
|
||||
shell help
|
||||
shell --help
|
||||
|
||||
shell help spawn
|
||||
shell spawn --help
|
||||
```
|
||||
|
||||
### Custom help
|
||||
|
||||
You can display extra information by adding a listener for the `--help` event.
|
||||
|
||||
Example file: [custom-help](./examples/custom-help)
|
||||
|
||||
```js
|
||||
program
|
||||
.option('-f, --foo', 'enable some foo');
|
||||
|
||||
// must be before .parse()
|
||||
program.on('--help', () => {
|
||||
console.log('');
|
||||
console.log('Example call:');
|
||||
console.log(' $ custom-help --help');
|
||||
});
|
||||
```
|
||||
|
||||
Yields the following help output:
|
||||
|
||||
```Text
|
||||
Usage: custom-help [options]
|
||||
|
||||
Options:
|
||||
-f, --foo enable some foo
|
||||
-h, --help display help for command
|
||||
|
||||
Example call:
|
||||
$ custom-help --help
|
||||
```
|
||||
|
||||
### .usage and .name
|
||||
|
||||
These allow you to customise the usage description in the first line of the help. The name is otherwise
|
||||
deduced from the (full) program arguments. Given:
|
||||
|
||||
```js
|
||||
program
|
||||
.name("my-command")
|
||||
.usage("[global options] command")
|
||||
```
|
||||
|
||||
The help will start with:
|
||||
|
||||
```Text
|
||||
Usage: my-command [global options] command
|
||||
```
|
||||
|
||||
### .help(cb)
|
||||
|
||||
Output help information and exit immediately. Optional callback cb allows post-processing of help text before it is displayed.
|
||||
|
||||
### .outputHelp(cb)
|
||||
|
||||
Output help information without exiting.
|
||||
Optional callback cb allows post-processing of help text before it is displayed.
|
||||
|
||||
### .helpInformation()
|
||||
|
||||
Get the command help information as a string for processing or displaying yourself. (The text does not include the custom help
|
||||
from `--help` listeners.)
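A minimal sketch of the post-processing callback and of fetching the help text as a string (the uppercase transform is just an arbitrary example of post-processing):

```js
// Print help without exiting, post-processed by the callback.
program.outputHelp((text) => text.toUpperCase());

// Or take the help text as a string and handle the output yourself.
const helpText = program.helpInformation();
process.stdout.write(helpText);
```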
|
||||
|
||||
### .helpOption(flags, description)
|
||||
|
||||
Override the default help flags and description. Pass false to disable the built-in help option.
|
||||
|
||||
```js
|
||||
program
|
||||
.helpOption('-e, --HELP', 'read more information');
|
||||
```
|
||||
|
||||
### .addHelpCommand()
|
||||
|
||||
You can explicitly turn on or off the implicit help command with `.addHelpCommand()` and `.addHelpCommand(false)`.
|
||||
|
||||
You can both turn on and customise the help command by supplying the name and description:
|
||||
|
||||
```js
|
||||
program.addHelpCommand('assist [command]', 'show assistance');
|
||||
```
|
||||
|
||||
## Custom event listeners
|
||||
|
||||
You can execute custom actions by listening to command and option events.
|
||||
|
||||
```js
|
||||
program.on('option:verbose', function () {
|
||||
process.env.VERBOSE = this.verbose;
|
||||
});
|
||||
|
||||
program.on('command:*', function (operands) {
|
||||
console.error(`error: unknown command '${operands[0]}'`);
|
||||
const availableCommands = program.commands.map(cmd => cmd.name());
|
||||
mySuggestBestMatch(operands[0], availableCommands);
|
||||
process.exitCode = 1;
|
||||
});
|
||||
```
|
||||
|
||||
## Bits and pieces
|
||||
|
||||
### .parse() and .parseAsync()
|
||||
|
||||
The first argument to `.parse` is the array of strings to parse. You may omit the parameter to implicitly use `process.argv`.
|
||||
|
||||
If the arguments follow different conventions than node you can pass a `from` option in the second parameter:
|
||||
|
||||
- 'node': default, `argv[0]` is the application and `argv[1]` is the script being run, with user parameters after that
|
||||
- 'electron': `argv[1]` varies depending on whether the electron application is packaged
|
||||
- 'user': all of the arguments from the user
|
||||
|
||||
For example:
|
||||
|
||||
```js
|
||||
program.parse(process.argv); // Explicit, node conventions
|
||||
program.parse(); // Implicit, and auto-detect electron
|
||||
program.parse(['-f', 'filename'], { from: 'user' });
|
||||
```
|
||||
|
||||
### Avoiding option name clashes
|
||||
|
||||
The original and default behaviour is that the option values are stored
|
||||
as properties on the program, and the action handler is passed a
|
||||
command object with the option values stored as properties.
|
||||
This is very convenient to code, but the downside is possible clashes with
|
||||
existing properties of Command.
|
||||
|
||||
There are two new routines to change the behaviour, and the default behaviour may change in the future:
|
||||
|
||||
- `storeOptionsAsProperties`: whether to store option values as properties on command object, or store separately (specify false) and access using `.opts()`
|
||||
- `passCommandToAction`: whether to pass command to action handler,
|
||||
or just the options (specify false)
|
||||
|
||||
Example file: [storeOptionsAsProperties-action.js](./examples/storeOptionsAsProperties-action.js)
|
||||
|
||||
```js
|
||||
program
|
||||
.storeOptionsAsProperties(false)
|
||||
.passCommandToAction(false);
|
||||
|
||||
program
|
||||
.name('my-program-name')
|
||||
.option('-n,--name <name>');
|
||||
|
||||
program
|
||||
.command('show')
|
||||
.option('-a,--action <action>')
|
||||
.action((options) => {
|
||||
console.log(options.action);
|
||||
});
|
||||
|
||||
program.parse(process.argv);
|
||||
|
||||
const programOptions = program.opts();
|
||||
console.log(programOptions.name);
|
||||
```
|
||||
|
||||
### TypeScript
|
||||
|
||||
The Commander package includes its TypeScript Definition file.
|
||||
|
||||
If you use `ts-node` and stand-alone executable subcommands written as `.ts` files, you need to call your program through node to get the subcommands called correctly. e.g.
|
||||
|
||||
```bash
|
||||
node -r ts-node/register pm.ts
|
||||
```
|
||||
|
||||
### createCommand()
|
||||
|
||||
This factory function creates a new command. It is exported and may be used instead of using `new`, like:
|
||||
|
||||
```js
|
||||
const { createCommand } = require('commander');
|
||||
const program = createCommand();
|
||||
```
|
||||
|
||||
`createCommand` is also a method of the Command object, and creates a new command rather than a subcommand. This gets used internally
|
||||
when creating subcommands using `.command()`, and you may override it to
|
||||
customise the new subcommand (examples using [subclass](./examples/custom-command-class.js) and [function](./examples/custom-command-function.js)).
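A minimal sketch of overriding `createCommand` with a subclass, in the spirit of the linked examples; the customisation applied here (`allowUnknownOption`) is chosen just for illustration:

```js
const { Command } = require('commander');

class MyCommand extends Command {
  createCommand(name) {
    // Every subcommand created via .command() is customised here.
    const cmd = new MyCommand(name);
    cmd.allowUnknownOption();
    return cmd;
  }
}

const program = new MyCommand();
program
  .command('sub')
  .action(() => console.log('sub called'));

program.parse(process.argv);
```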
|
||||
|
||||
### Import into ECMAScript Module
|
||||
|
||||
Commander is currently a CommonJS package, and the default export can be imported into an ES Module:
|
||||
|
||||
```js
|
||||
// index.mjs
|
||||
import commander from 'commander';
|
||||
const program = commander.program;
|
||||
const newCommand = new commander.Command();
|
||||
```
|
||||
|
||||
### Node options such as `--harmony`
|
||||
|
||||
You can enable the `--harmony` option in two ways:
|
||||
|
||||
- Use `#! /usr/bin/env node --harmony` in the subcommand scripts. (Note that Windows does not support this pattern.)
|
||||
- Use the `--harmony` option when calling the command, like `node --harmony examples/pm publish`. The `--harmony` option will be preserved when spawning the subcommand process.
|
||||
|
||||
### Debugging stand-alone executable subcommands
|
||||
|
||||
An executable subcommand is launched as a separate child process.
|
||||
|
||||
If you are using the node inspector for [debugging](https://nodejs.org/en/docs/guides/debugging-getting-started/) executable subcommands using `node --inspect` et al,
|
||||
the inspector port is incremented by 1 for the spawned subcommand.
|
||||
|
||||
If you are using VSCode to debug executable subcommands you need to set the `"autoAttachChildProcesses": true` flag in your launch.json configuration.
|
||||
|
||||
### Override exit handling
|
||||
|
||||
By default Commander calls `process.exit` when it detects errors, or after displaying the help or version. You can override
|
||||
this behaviour and optionally supply a callback. The default override throws a `CommanderError`.
|
||||
|
||||
The override callback is passed a `CommanderError` with properties `exitCode` number, `code` string, and `message`. The default override behaviour is to throw the error, except for async handling of executable subcommand completion which carries on. The normal display of error messages or version or help
|
||||
is not affected by the override which is called after the display.
|
||||
|
||||
```js
|
||||
program.exitOverride();
|
||||
|
||||
try {
|
||||
program.parse(process.argv);
|
||||
} catch (err) {
|
||||
// custom processing...
|
||||
}
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
Example file: [deploy](./examples/deploy)
|
||||
|
||||
```js
|
||||
const { program } = require('commander');
|
||||
|
||||
program
|
||||
.version('0.1.0')
|
||||
.option('-C, --chdir <path>', 'change the working directory')
|
||||
.option('-c, --config <path>', 'set config path. defaults to ./deploy.conf')
|
||||
.option('-T, --no-tests', 'ignore test hook');
|
||||
|
||||
program
|
||||
.command('setup [env]')
|
||||
.description('run setup commands for all envs')
|
||||
.option("-s, --setup_mode [mode]", "Which setup mode to use")
|
||||
.action(function(env, options){
|
||||
const mode = options.setup_mode || "normal";
|
||||
env = env || 'all';
|
||||
console.log('setup for %s env(s) with %s mode', env, mode);
|
||||
});
|
||||
|
||||
program
|
||||
.command('exec <cmd>')
|
||||
.alias('ex')
|
||||
.description('execute the given remote cmd')
|
||||
.option("-e, --exec_mode <mode>", "Which exec mode to use")
|
||||
.action(function(cmd, options){
|
||||
console.log('exec "%s" using %s mode', cmd, options.exec_mode);
|
||||
}).on('--help', function() {
|
||||
console.log('');
|
||||
console.log('Examples:');
|
||||
console.log('');
|
||||
console.log(' $ deploy exec sequential');
|
||||
console.log(' $ deploy exec async');
|
||||
});
|
||||
|
||||
program.parse(process.argv);
|
||||
```
|
||||
|
||||
More Demos can be found in the [examples](https://github.com/tj/commander.js/tree/master/examples) directory.
|
||||
|
||||
## Support
|
||||
|
||||
The current version of Commander is fully supported on Long Term Support versions of Node, and is likely to work with Node 6, although this is not tested.
|
||||
(For versions of Node below Node 6, use Commander 3.x or 2.x.)
|
||||
|
||||
The main forum for free and community support is the project [Issues](https://github.com/tj/commander.js/issues) on GitHub.
|
||||
|
||||
### Commander for enterprise
|
||||
|
||||
Available as part of the Tidelift Subscription
|
||||
|
||||
The maintainers of Commander and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. [Learn more.](https://tidelift.com/subscription/pkg/npm-commander?utm_source=npm-commander&utm_medium=referral&utm_campaign=enterprise&utm_term=repo)
|
||||
1881
node_modules/commander/index.js
generated
vendored
Normal file
File diff suppressed because it is too large
51
node_modules/commander/package.json
generated
vendored
Normal file
@ -0,0 +1,51 @@
|
||||
{
|
||||
"name": "commander",
|
||||
"version": "6.2.1",
|
||||
"description": "the complete solution for node.js command-line programs",
|
||||
"keywords": [
|
||||
"commander",
|
||||
"command",
|
||||
"option",
|
||||
"parser",
|
||||
"cli",
|
||||
"argument",
|
||||
"args",
|
||||
"argv"
|
||||
],
|
||||
"author": "TJ Holowaychuk <tj@vision-media.ca>",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/tj/commander.js.git"
|
||||
},
|
||||
"scripts": {
|
||||
"lint": "eslint index.js \"tests/**/*.js\"",
|
||||
"typescript-lint": "eslint typings/*.ts",
|
||||
"test": "jest && npm run test-typings",
|
||||
"test-typings": "tsc -p tsconfig.json"
|
||||
},
|
||||
"main": "index",
|
||||
"files": [
|
||||
"index.js",
|
||||
"typings/index.d.ts"
|
||||
],
|
||||
"dependencies": {},
|
||||
"devDependencies": {
|
||||
"@types/jest": "^26.0.15",
|
||||
"@types/node": "^14.14.2",
|
||||
"@typescript-eslint/eslint-plugin": "^4.5.0",
|
||||
"eslint": "^7.11.0",
|
||||
"eslint-config-standard-with-typescript": "^19.0.1",
|
||||
"eslint-plugin-jest": "^24.1.0",
|
||||
"jest": "^26.6.0",
|
||||
"standard": "^15.0.0",
|
||||
"typescript": "^4.0.3"
|
||||
},
|
||||
"typings": "typings/index.d.ts",
|
||||
"jest": {
|
||||
"collectCoverage": true
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 6"
|
||||
}
|
||||
}
|
||||
410
node_modules/commander/typings/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,410 @@
|
||||
// Type definitions for commander
|
||||
// Original definitions by: Alan Agius <https://github.com/alan-agius4>, Marcelo Dezem <https://github.com/mdezem>, vvakame <https://github.com/vvakame>, Jules Randolph <https://github.com/sveinburne>
|
||||
|
||||
declare namespace commander {
|
||||
|
||||
interface CommanderError extends Error {
|
||||
code: string;
|
||||
exitCode: number;
|
||||
message: string;
|
||||
nestedError?: string;
|
||||
}
|
||||
type CommanderErrorConstructor = new (exitCode: number, code: string, message: string) => CommanderError;
|
||||
|
||||
interface Option {
|
||||
flags: string;
|
||||
required: boolean; // A value must be supplied when the option is specified.
|
||||
optional: boolean; // A value is optional when the option is specified.
|
||||
mandatory: boolean; // The option must have a value after parsing, which usually means it must be specified on command line.
|
||||
bool: boolean;
|
||||
short?: string;
|
||||
long: string;
|
||||
description: string;
|
||||
}
|
||||
type OptionConstructor = new (flags: string, description?: string) => Option;
|
||||
|
||||
interface ParseOptions {
|
||||
from: 'node' | 'electron' | 'user';
|
||||
}
|
||||
|
||||
interface Command {
|
||||
[key: string]: any; // options as properties
|
||||
|
||||
args: string[];
|
||||
|
||||
commands: Command[];
|
||||
|
||||
/**
|
||||
* Set the program version to `str`.
|
||||
*
|
||||
* This method auto-registers the "-V, --version" flag
|
||||
* which will print the version number when passed.
|
||||
*
|
||||
* You can optionally supply the flags and description to override the defaults.
|
||||
*/
|
||||
version(str: string, flags?: string, description?: string): this;
|
||||
|
||||
/**
|
||||
* Define a command, implemented using an action handler.
|
||||
*
|
||||
* @remarks
|
||||
* The command description is supplied using `.description`, not as a parameter to `.command`.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* program
|
||||
* .command('clone <source> [destination]')
|
||||
* .description('clone a repository into a newly created directory')
|
||||
* .action((source, destination) => {
|
||||
* console.log('clone command called');
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* @param nameAndArgs - command name and arguments, args are `<required>` or `[optional]` and last may also be `variadic...`
|
||||
* @param opts - configuration options
|
||||
* @returns new command
|
||||
*/
|
||||
command(nameAndArgs: string, opts?: CommandOptions): ReturnType<this['createCommand']>;
|
||||
/**
|
||||
* Define a command, implemented in a separate executable file.
|
||||
*
|
||||
* @remarks
|
||||
* The command description is supplied as the second parameter to `.command`.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* program
|
||||
* .command('start <service>', 'start named service')
|
||||
* .command('stop [service]', 'stop named service, or all if no name supplied');
|
||||
* ```
|
||||
*
|
||||
* @param nameAndArgs - command name and arguments, args are `<required>` or `[optional]` and last may also be `variadic...`
|
||||
* @param description - description of executable command
|
||||
* @param opts - configuration options
|
||||
* @returns `this` command for chaining
|
||||
*/
|
||||
command(nameAndArgs: string, description: string, opts?: commander.ExecutableCommandOptions): this;
|
||||
|
||||
/**
|
||||
* Factory routine to create a new unattached command.
|
||||
*
|
||||
* See .command() for creating an attached subcommand, which uses this routine to
|
||||
* create the command. You can override createCommand to customise subcommands.
|
||||
*/
|
||||
createCommand(name?: string): Command;
|
||||
|
||||
/**
|
||||
* Add a prepared subcommand.
|
||||
*
|
||||
* See .command() for creating an attached subcommand which inherits settings from its parent.
|
||||
*
|
||||
* @returns `this` command for chaining
|
||||
*/
|
||||
addCommand(cmd: Command, opts?: CommandOptions): this;
|
||||
|
||||
/**
|
||||
* Define argument syntax for command.
|
||||
*
|
||||
* @returns `this` command for chaining
|
||||
*/
|
||||
arguments(desc: string): this;
|
||||
|
||||
/**
|
||||
* Override default decision whether to add implicit help command.
|
||||
*
|
||||
* addHelpCommand() // force on
|
||||
* addHelpCommand(false); // force off
|
||||
* addHelpCommand('help [cmd]', 'display help for [cmd]'); // force on with custom details
|
||||
*
|
||||
* @returns `this` command for chaining
|
||||
*/
|
||||
addHelpCommand(enableOrNameAndArgs?: string | boolean, description?: string): this;
|
||||
|
||||
/**
|
||||
* Register callback to use as replacement for calling process.exit.
|
||||
*/
|
||||
exitOverride(callback?: (err: CommanderError) => never|void): this;
|
||||
|
||||
/**
|
||||
* Register callback `fn` for the command.
|
||||
*
|
||||
* @example
|
||||
* program
|
||||
* .command('help')
|
||||
* .description('display verbose help')
|
||||
* .action(function() {
|
||||
* // output help here
|
||||
* });
|
||||
*
|
||||
* @returns `this` command for chaining
|
||||
*/
|
||||
action(fn: (...args: any[]) => void | Promise<void>): this;
|
||||
|
||||
/**
|
||||
* Define option with `flags`, `description` and optional
|
||||
* coercion `fn`.
|
||||
*
|
||||
* The `flags` string should contain both the short and long flags,
|
||||
* separated by comma, a pipe or space. The following are all valid
|
||||
* all will output this way when `--help` is used.
|
||||
*
|
||||
* "-p, --pepper"
|
||||
* "-p|--pepper"
|
||||
* "-p --pepper"
|
||||
*
|
||||
* @example
|
||||
* // simple boolean defaulting to false
|
||||
* program.option('-p, --pepper', 'add pepper');
|
||||
*
|
||||
* --pepper
|
||||
* program.pepper
|
||||
* // => Boolean
|
||||
*
|
||||
* // simple boolean defaulting to true
|
||||
* program.option('-C, --no-cheese', 'remove cheese');
|
||||
*
|
||||
* program.cheese
|
||||
* // => true
|
||||
*
|
||||
* --no-cheese
|
||||
* program.cheese
|
||||
* // => false
|
||||
*
|
||||
* // required argument
|
||||
* program.option('-C, --chdir <path>', 'change the working directory');
|
||||
*
|
||||
* --chdir /tmp
|
||||
* program.chdir
|
||||
* // => "/tmp"
|
||||
*
|
||||
* // optional argument
|
||||
* program.option('-c, --cheese [type]', 'add cheese [marble]');
|
||||
*
|
||||
* @returns `this` command for chaining
|
||||
*/
|
||||
option(flags: string, description?: string, defaultValue?: string | boolean): this;
|
||||
option(flags: string, description: string, regexp: RegExp, defaultValue?: string | boolean): this;
|
||||
option<T>(flags: string, description: string, fn: (value: string, previous: T) => T, defaultValue?: T): this;
|
||||
|
||||
/**
|
||||
* Define a required option, which must have a value after parsing. This usually means
|
||||
* the option must be specified on the command line. (Otherwise the same as .option().)
|
||||
*
|
||||
* The `flags` string should contain both the short and long flags, separated by comma, a pipe or space.
|
||||
*/
|
||||
requiredOption(flags: string, description?: string, defaultValue?: string | boolean): this;
|
||||
requiredOption(flags: string, description: string, regexp: RegExp, defaultValue?: string | boolean): this;
|
||||
requiredOption<T>(flags: string, description: string, fn: (value: string, previous: T) => T, defaultValue?: T): this;
|
||||
|
||||
/**
|
||||
* Whether to store option values as properties on command object,
|
||||
* or store separately (specify false). In both cases the option values can be accessed using .opts().
|
||||
*
|
||||
* @returns `this` command for chaining
|
||||
*/
|
||||
storeOptionsAsProperties(value?: boolean): this;
|
||||
|
||||
/**
|
||||
* Whether to pass command to action handler,
|
||||
* or just the options (specify false).
|
||||
*
|
||||
* @returns `this` command for chaining
|
||||
*/
|
||||
passCommandToAction(value?: boolean): this;
|
||||
|
||||
/**
|
||||
* Alter parsing of short flags with optional values.
|
||||
*
|
||||
* @example
|
||||
* // for `.option('-f,--flag [value]'):
|
||||
* .combineFlagAndOptionalValue(true) // `-f80` is treated like `--flag=80`, this is the default behaviour
|
||||
* .combineFlagAndOptionalValue(false) // `-fb` is treated like `-f -b`
|
||||
*
|
||||
* @returns `this` command for chaining
|
||||
*/
|
||||
combineFlagAndOptionalValue(arg?: boolean): this;
|
||||
|
||||
/**
|
||||
* Allow unknown options on the command line.
|
||||
*
|
||||
* @param [arg] if `true` or omitted, no error will be thrown for unknown options.
|
||||
* @returns `this` command for chaining
|
||||
*/
|
||||
allowUnknownOption(arg?: boolean): this;
|
||||
|
||||
/**
|
||||
* Parse `argv`, setting options and invoking commands when defined.
|
||||
*
|
||||
* The default expectation is that the arguments are from node and have the application as argv[0]
|
||||
* and the script being run in argv[1], with user parameters after that.
|
||||
*
|
||||
* Examples:
|
||||
*
|
||||
* program.parse(process.argv);
|
||||
* program.parse(); // implicitly use process.argv and auto-detect node vs electron conventions
|
||||
* program.parse(my-args, { from: 'user' }); // just user supplied arguments, nothing special about argv[0]
|
||||
*
|
||||
* @returns `this` command for chaining
|
||||
*/
|
||||
parse(argv?: string[], options?: ParseOptions): this;
|
||||
|
||||
/**
|
||||
* Parse `argv`, setting options and invoking commands when defined.
|
||||
*
|
||||
* Use parseAsync instead of parse if any of your action handlers are async. Returns a Promise.
|
||||
*
|
||||
* The default expectation is that the arguments are from node and have the application as argv[0]
|
||||
* and the script being run in argv[1], with user parameters after that.
|
||||
*
|
||||
* Examples:
|
||||
*
|
||||
* program.parseAsync(process.argv);
|
||||
* program.parseAsync(); // implicitly use process.argv and auto-detect node vs electron conventions
|
||||
* program.parseAsync(my-args, { from: 'user' }); // just user supplied arguments, nothing special about argv[0]
|
||||
*
|
||||
* @returns Promise
|
||||
*/
|
||||
parseAsync(argv?: string[], options?: ParseOptions): Promise<this>;
|
||||
|
||||
/**
|
||||
* Parse options from `argv` removing known options,
|
||||
* and return argv split into operands and unknown arguments.
|
||||
*
|
||||
* @example
|
||||
* argv => operands, unknown
|
||||
* --known kkk op => [op], []
|
||||
* op --known kkk => [op], []
|
||||
* sub --unknown uuu op => [sub], [--unknown uuu op]
|
||||
* sub -- --unknown uuu op => [sub --unknown uuu op], []
|
||||
*/
|
||||
parseOptions(argv: string[]): commander.ParseOptionsResult;
|
||||
|
||||
/**
|
||||
* Return an object containing options as key-value pairs
|
||||
*/
|
||||
opts(): { [key: string]: any };
|
||||
|
||||
/**
|
||||
* Set the description.
|
||||
*
|
||||
* @returns `this` command for chaining
|
||||
*/
|
||||
description(str: string, argsDescription?: {[argName: string]: string}): this;
|
||||
/**
|
||||
* Get the description.
|
||||
*/
|
||||
description(): string;
|
||||
|
||||
/**
|
||||
* Set an alias for the command.
|
||||
*
|
||||
* You may call more than once to add multiple aliases. Only the first alias is shown in the auto-generated help.
|
||||
*
|
||||
* @returns `this` command for chaining
|
||||
*/
|
||||
alias(alias: string): this;
|
||||
/**
|
||||
* Get alias for the command.
|
||||
*/
|
||||
alias(): string;
|
||||
|
||||
/**
|
||||
* Set aliases for the command.
|
||||
*
|
||||
* Only the first alias is shown in the auto-generated help.
|
||||
*
|
||||
* @returns `this` command for chaining
|
||||
*/
|
||||
aliases(aliases: string[]): this;
|
||||
/**
|
||||
* Get aliases for the command.
|
||||
*/
|
||||
aliases(): string[];
|
||||
|
||||
/**
|
||||
* Set the command usage.
|
||||
*
|
||||
* @returns `this` command for chaining
|
||||
*/
|
||||
usage(str: string): this;
|
||||
/**
|
||||
* Get the command usage.
|
||||
*/
|
||||
usage(): string;
|
||||
|
||||
/**
|
||||
* Set the name of the command.
|
||||
*
|
||||
* @returns `this` command for chaining
|
||||
*/
|
||||
name(str: string): this;
|
||||
/**
|
||||
* Get the name of the command.
|
||||
*/
|
||||
name(): string;
|
||||
|
||||
/**
|
||||
* Output help information for this command.
|
||||
*
|
||||
* When listener(s) are available for the helpLongFlag
|
||||
* those callbacks are invoked.
|
||||
*/
|
||||
outputHelp(cb?: (str: string) => string): void;
|
||||
|
||||
/**
|
||||
* Return command help documentation.
|
||||
*/
|
||||
helpInformation(): string;
|
||||
|
||||
/**
|
||||
* You can pass in flags and a description to override the help
|
||||
* flags and help description for your command. Pass in false
|
||||
* to disable the built-in help option.
|
||||
*/
|
||||
helpOption(flags?: string | boolean, description?: string): this;
|
||||
|
||||
/**
|
||||
* Output help information and exit.
|
||||
*/
|
||||
help(cb?: (str: string) => string): never;
|
||||
|
||||
/**
|
||||
* Add a listener (callback) for when events occur. (Implemented using EventEmitter.)
|
||||
*
|
||||
* @example
|
||||
* program
|
||||
* .on('--help', () -> {
|
||||
* console.log('See web site for more information.');
|
||||
* });
|
||||
*/
|
||||
on(event: string | symbol, listener: (...args: any[]) => void): this;
|
||||
}
|
||||
type CommandConstructor = new (name?: string) => Command;
|
||||
|
||||
interface CommandOptions {
|
||||
noHelp?: boolean; // old name for hidden
|
||||
hidden?: boolean;
|
||||
isDefault?: boolean;
|
||||
}
|
||||
interface ExecutableCommandOptions extends CommandOptions {
|
||||
executableFile?: string;
|
||||
}
|
||||
|
||||
interface ParseOptionsResult {
|
||||
operands: string[];
|
||||
unknown: string[];
|
||||
}
|
||||
|
||||
interface CommanderStatic extends Command {
|
||||
program: Command;
|
||||
Command: CommandConstructor;
|
||||
Option: OptionConstructor;
|
||||
CommanderError: CommanderErrorConstructor;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// Declaring namespace AND global
|
||||
// eslint-disable-next-line @typescript-eslint/no-redeclare
|
||||
declare const commander: commander.CommanderStatic;
|
||||
export = commander;
|
||||
58
node_modules/csv-writer/CHANGELOG.md
generated
vendored
Normal file
@ -0,0 +1,58 @@
|
||||
# Changelog
|
||||
|
||||
All notable changes to this project will be documented in this file.
|
||||
|
||||
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
|
||||
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
|
||||
|
||||
## [1.6.0] - 2020-01-18
|
||||
### Added
|
||||
- Support for specifying values in nested objects. [#34](https://github.com/ryu1kn/csv-writer/pull/34)
|
||||
|
||||
## [1.5.0] - 2019-07-13
|
||||
### Added
|
||||
- Added `alwaysQuote` flag to always double-quote all fields. [#21](https://github.com/ryu1kn/csv-writer/pull/21)
|
||||
|
||||
## [1.4.0] - 2019-06-19
|
||||
### Added
|
||||
- Allow CRLF as a record delimiter. [#27](https://github.com/ryu1kn/csv-writer/pull/27)
|
||||
|
||||
## [1.3.0] - 2019-04-19
|
||||
### Changed
|
||||
- Changed project language from JavaScript to TypeScript.
|
||||
|
||||
### Added
|
||||
- Made TypeScript type definitions accessible. Thanks to @coyotte508.
|
||||
[PR #23](https://github.com/ryu1kn/csv-writer/pull/23)
|
||||
|
||||
## [1.2.0] - 2018-08-22
|
||||
### Added
|
||||
- CSV records are now not limited to an array but can be an iterable object. Thanks to @pineapplemachine.
|
||||
[PR #11](https://github.com/ryu1kn/csv-writer/pull/11)
|
||||
|
||||
## [1.1.0] - 2018-08-20
|
||||
### Added
|
||||
- Allow semicolon as a field delimiter as it is commonly used in CSV in some regions. Thanks to @HKskn.
|
||||
[PR #8](https://github.com/ryu1kn/csv-writer/pull/8), [#6](https://github.com/ryu1kn/csv-writer/pull/6)
|
||||
|
||||
## [1.0.1] - 2018-08-09
|
||||
### Fixed
|
||||
- Fixed the issue that coverage report badge on README shows question mark.
|
||||
Use Coveralls instead of CodeClimate to get code coverage.
|
||||
|
||||
## [1.0.0] - 2018-02-28
|
||||
### Added
|
||||
- Support for adding CSV records to already existing files. Thanks to @jonmelcher. [PR #4](https://github.com/ryu1kn/csv-writer/pull/4)
|
||||
|
||||
## [0.0.3] - 2016-11-09
|
||||
### Fixed
|
||||
- Fixed the bug that fields were not always surrounded by double quotes
|
||||
- Fixed the bug that white space characters on the edge of fields were trimmed
|
||||
|
||||
## [0.0.2] - 2016-10-15
|
||||
### Fixed
|
||||
- Fixed the bug that field values were not quoted when they have newline characters
|
||||
|
||||
## [0.0.1] - 2016-09-09
|
||||
### Added
|
||||
- Initial release of csv-writer
|
||||
328
node_modules/csv-writer/README.md
generated
vendored
Normal file
@ -0,0 +1,328 @@
|
||||
|
||||
|
||||
# CSV Writer
|
||||
|
||||
Convert objects/arrays into a CSV string or write them into a file.
|
||||
It respects [RFC 4180](https://tools.ietf.org/html/rfc4180) for the output CSV format.
|
||||
|
||||
## Prerequisite
|
||||
|
||||
* Node version 4 or above
|
||||
|
||||
## Usage
|
||||
|
||||
The example below shows how you can write records, defined as an array of objects, into a file.
|
||||
|
||||
```js
|
||||
const createCsvWriter = require('csv-writer').createObjectCsvWriter;
|
||||
const csvWriter = createCsvWriter({
|
||||
path: 'path/to/file.csv',
|
||||
header: [
|
||||
{id: 'name', title: 'NAME'},
|
||||
{id: 'lang', title: 'LANGUAGE'}
|
||||
]
|
||||
});
|
||||
|
||||
const records = [
|
||||
{name: 'Bob', lang: 'French, English'},
|
||||
{name: 'Mary', lang: 'English'}
|
||||
];
|
||||
|
||||
csvWriter.writeRecords(records) // returns a promise
|
||||
.then(() => {
|
||||
console.log('...Done');
|
||||
});
|
||||
|
||||
// This will produce a file path/to/file.csv with following contents:
|
||||
//
|
||||
// NAME,LANGUAGE
|
||||
// Bob,"French, English"
|
||||
// Mary,English
|
||||
```
|
||||
|
||||
You can keep writing records into the same file by calling `writeRecords` multiple times
|
||||
(but you need to wait for the promise returned by the previous `writeRecords` call to be fulfilled).
|
||||
|
||||
```js
|
||||
// In an `async` function
|
||||
await csvWriter.writeRecords(records1)
|
||||
await csvWriter.writeRecords(records2)
|
||||
...
|
||||
```
|
||||
|
||||
However, if you need to keep writing a large amount of data to a file, you may instead want to create
|
||||
a Node transform stream that uses a `CsvStringifier` (explained later) internally,
|
||||
and pipe that stream into a file write stream, as sketched below.
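A minimal sketch of that approach, reusing the header and records from the earlier examples; only `createObjectCsvStringifier` is part of this package, the rest is plain Node:

```js
const fs = require('fs');
const { Transform } = require('stream');
const { createObjectCsvStringifier } = require('csv-writer');

const csvStringifier = createObjectCsvStringifier({
  header: [
    {id: 'name', title: 'NAME'},
    {id: 'lang', title: 'LANGUAGE'}
  ]
});

// Turns one record object at a time into a CSV line.
const toCsvLine = new Transform({
  writableObjectMode: true,
  transform(record, _encoding, callback) {
    callback(null, csvStringifier.stringifyRecords([record]));
  }
});

const fileStream = fs.createWriteStream('path/to/file.csv');
fileStream.write(csvStringifier.getHeaderString());
toCsvLine.pipe(fileStream);

toCsvLine.write({name: 'Bob', lang: 'French, English'});
toCsvLine.end({name: 'Mary', lang: 'English'});
```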
|
||||
|
||||
If you don't want to write a header line, don't give `title` to header elements and just give field IDs as a string.
|
||||
|
||||
```js
|
||||
const createCsvWriter = require('csv-writer').createObjectCsvWriter;
|
||||
const csvWriter = createCsvWriter({
|
||||
path: 'path/to/file.csv',
|
||||
header: ['name', 'lang']
|
||||
});
|
||||
```
|
||||
|
||||
If each record is defined as an array, use `createArrayCsvWriter` to get a `csvWriter`.
|
||||
|
||||
```js
|
||||
const createCsvWriter = require('csv-writer').createArrayCsvWriter;
|
||||
const csvWriter = createCsvWriter({
|
||||
header: ['NAME', 'LANGUAGE'],
|
||||
path: 'path/to/file.csv'
|
||||
});
|
||||
|
||||
const records = [
|
||||
['Bob', 'French, English'],
|
||||
['Mary', 'English']
|
||||
];
|
||||
|
||||
csvWriter.writeRecords(records) // returns a promise
|
||||
.then(() => {
|
||||
console.log('...Done');
|
||||
});
|
||||
|
||||
// This will produce a file path/to/file.csv with following contents:
|
||||
//
|
||||
// NAME,LANGUAGE
|
||||
// Bob,"French, English"
|
||||
// Mary,English
|
||||
```
|
||||
|
||||
If you just want to get a CSV string but don't want to write into a file,
|
||||
you can use `createObjectCsvStringifier` (or `createArrayCsvStringifier`)
|
||||
to get a `csvStringifier`.
|
||||
|
||||
```js
|
||||
const createCsvStringifier = require('csv-writer').createObjectCsvStringifier;
|
||||
const csvStringifier = createCsvStringifier({
|
||||
header: [
|
||||
{id: 'name', title: 'NAME'},
|
||||
{id: 'lang', title: 'LANGUAGE'}
|
||||
]
|
||||
});
|
||||
|
||||
const records = [
|
||||
{name: 'Bob', lang: 'French, English'},
|
||||
{name: 'Mary', lang: 'English'}
|
||||
];
|
||||
|
||||
console.log(csvStringifier.getHeaderString());
|
||||
// => 'NAME,LANGUAGE\n'
|
||||
|
||||
console.log(csvStringifier.stringifyRecords(records));
|
||||
// => 'Bob,"French, English"\nMary,English\n'
|
||||
```
|
||||
|
||||
|
||||
## API
|
||||
|
||||
### createObjectCsvWriter(params)
|
||||
|
||||
##### Parameters:
|
||||
|
||||
* params `<Object>`
|
||||
* path `<string>`
|
||||
|
||||
Path to a write file
|
||||
|
||||
* header `<Array<{id, title}|string>>`
|
||||
|
||||
Array of objects (`id` and `title` properties) or strings (field IDs).
|
||||
A header line will be written to the file only if given as an array of objects.
|
||||
|
||||
* fieldDelimiter `<string>` (optional)
|
||||
|
||||
Default: `,`. Only either comma `,` or semicolon `;` is allowed.
|
||||
|
||||
* recordDelimiter `<string>` (optional)
|
||||
|
||||
Default: `\n`. Only either LF (`\n`) or CRLF (`\r\n`) is allowed.
|
||||
|
||||
* headerIdDelimiter `<string>` (optional)
|
||||
|
||||
Default: `undefined`. Give this value to specify a path to a value in a nested object (see the sketch after this list).
|
||||
|
||||
* alwaysQuote `<boolean>` (optional)
|
||||
|
||||
Default: `false`. Set it to `true` to double-quote all fields regardless of their values.
|
||||
|
||||
* encoding `<string>` (optional)
|
||||
|
||||
Default: `utf8`.
|
||||
|
||||
* append `<boolean>` (optional)
|
||||
|
||||
Default: `false`. When `true`, it will append CSV records to the specified file.
|
||||
If the file doesn't exist, it will create one.
|
||||
|
||||
**NOTE:** A header line will not be written to the file if `true` is given.
|
||||
|
||||
##### Returns:
|
||||
|
||||
* `<CsvWriter>`
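A minimal sketch of the `headerIdDelimiter` option from the list above; the nested `name` object and the `.` delimiter are assumptions made for illustration:

```js
const createCsvWriter = require('csv-writer').createObjectCsvWriter;
const csvWriter = createCsvWriter({
  path: 'path/to/file.csv',
  headerIdDelimiter: '.',
  header: [
    {id: 'name.first', title: 'FIRST NAME'},
    {id: 'name.last', title: 'LAST NAME'}
  ]
});

csvWriter.writeRecords([
  {name: {first: 'Bob', last: 'Howard'}},
  {name: {first: 'Mary', last: 'Jones'}}
]).then(() => console.log('...Done'));
```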
|
||||
|
||||
|
||||
### createArrayCsvWriter(params)
|
||||
|
||||
##### Parameters:
|
||||
|
||||
* params `<Object>`
|
||||
* path `<string>`
|
||||
|
||||
Path to a write file
|
||||
|
||||
* header `<Array<string>>` (optional)
|
||||
|
||||
Array of field titles
|
||||
|
||||
* fieldDelimiter `<string>` (optional)
|
||||
|
||||
Default: `,`. Only either comma `,` or semicolon `;` is allowed.
|
||||
|
||||
* recordDelimiter `<string>` (optional)
|
||||
|
||||
Default: `\n`. Only either LF (`\n`) or CRLF (`\r\n`) is allowed.
|
||||
|
||||
* alwaysQuote `<boolean>` (optional)
|
||||
|
||||
Default: `false`. Set it to `true` to double-quote all fields regardless of their values.
|
||||
|
||||
* encoding `<string>` (optional)
|
||||
|
||||
Default: `utf8`.
|
||||
|
||||
* append `<boolean>` (optional)
|
||||
|
||||
Default: `false`. When `true`, it will append CSV records to the specified file.
|
||||
If the file doesn't exist, it will create one.
|
||||
|
||||
**NOTE:** A header line will not be written to the file if `true` is given.
|
||||
|
||||
##### Returns:
|
||||
|
||||
* `<CsvWriter>`
|
||||
|
||||
|
||||
### CsvWriter#writeRecords(records)
|
||||
|
||||
##### Parameters:
|
||||
|
||||
* records `<Iterator<Object|Array>>`
|
||||
|
||||
Depending on which function was used to create a `csvWriter` (i.e. `createObjectCsvWriter` or `createArrayCsvWriter`),
|
||||
records will be either a collection of objects or arrays. As long as the collection is iterable, it doesn't need to be an array (see the sketch below).
|
||||
|
||||
##### Returns:
|
||||
|
||||
* `<Promise>`
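For example, a sketch reusing a `csvWriter` from the earlier examples, with the records supplied by a generator rather than an array:

```js
// Any iterable works; this generator yields records lazily.
function* generateRecords() {
  yield {name: 'Bob', lang: 'French, English'};
  yield {name: 'Mary', lang: 'English'};
}

csvWriter.writeRecords(generateRecords()) // returns a promise
  .then(() => console.log('...Done'));
```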
|
||||
|
||||
|
||||
### createObjectCsvStringifier(params)
|
||||
|
||||
##### Parameters:
|
||||
|
||||
* params `<Object>`
|
||||
* header `<Array<{id, title}|string>>`
|
||||
|
||||
Array of objects (`id` and `title` properties) or strings (field IDs)
|
||||
|
||||
* fieldDelimiter `<string>` (optional)
|
||||
|
||||
Default: `,`. Only either comma `,` or semicolon `;` is allowed.
|
||||
|
||||
* recordDelimiter `<string>` (optional)
|
||||
|
||||
Default: `\n`. Only either LF (`\n`) or CRLF (`\r\n`) is allowed.
|
||||
|
||||
* headerIdDelimiter `<string>` (optional)
|
||||
|
||||
Default: `undefined`. Give this value to specify a path to a value in a nested object.
|
||||
|
||||
* alwaysQuote `<boolean>` (optional)
|
||||
|
||||
Default: `false`. Set it to `true` to double-quote all fields regardless of their values.
|
||||
|
||||
##### Returns:
|
||||
|
||||
* `<ObjectCsvStringifier>`
|
||||
|
||||
### ObjectCsvStringifier#getHeaderString()
|
||||
|
||||
##### Returns:
|
||||
|
||||
* `<string>`
|
||||
|
||||
### ObjectCsvStringifier#stringifyRecords(records)
|
||||
|
||||
##### Parameters:
|
||||
|
||||
* records `<Array<Object>>`
|
||||
|
||||
##### Returns:
|
||||
|
||||
* `<string>`
|
||||
|
||||
### createArrayCsvStringifier(params)
|
||||
|
||||
##### Parameters:
|
||||
|
||||
* params `<Object>`
|
||||
* header `<Array<string>>` (optional)
|
||||
|
||||
Array of field titles
|
||||
|
||||
* fieldDelimiter `<string>` (optional)
|
||||
|
||||
Default: `,`. Only either comma `,` or semicolon `;` is allowed.
|
||||
|
||||
* recordDelimiter `<string>` (optional)
|
||||
|
||||
Default: `\n`. Only either LF (`\n`) or CRLF (`\r\n`) is allowed.
|
||||
|
||||
* alwaysQuote `<boolean>` (optional)
|
||||
|
||||
Default: `false`. Set it to `true` to double-quote all fields regardless of their values.
|
||||
|
||||
##### Returns:
|
||||
|
||||
* `<ArrayCsvStringifier>`
|
||||
|
||||
### ArrayCsvStringifier#getHeaderString()
|
||||
|
||||
##### Returns:
|
||||
|
||||
* `<string>`
|
||||
|
||||
### ArrayCsvStringifier#stringifyRecords(records)
|
||||
|
||||
##### Parameters:
|
||||
|
||||
* records `<Array<Array<string>>>`
|
||||
|
||||
##### Returns:
|
||||
|
||||
* `<string>`
|
||||
|
||||
|
||||
## Request Features or Report Bugs
|
||||
|
||||
Feature requests and bug reports are very welcome: https://github.com/ryu1kn/csv-writer/issues
|
||||
|
||||
A couple of requests from me when you raise an issue on GitHub.
|
||||
|
||||
* **Requesting a feature:** Please try to provide the context of why you want the feature, such as
|
||||
in what situation the feature could help you and how, or how the lack of the feature is causing an inconvenience to you.
|
||||
I can't start thinking of introducing it until I understand how it helps you 🙂
|
||||
* **Reporting a bug:** If you could provide a runnable code snippet that reproduces the bug, it would be very helpful!
|
||||
|
||||
|
||||
## Development
|
||||
|
||||
### Prerequisite
|
||||
|
||||
* Node version 8 or above
|
||||
* Docker
|
||||
19
node_modules/csv-writer/dist/index.js
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var csv_stringifier_factory_1 = require("./lib/csv-stringifier-factory");
|
||||
var csv_writer_factory_1 = require("./lib/csv-writer-factory");
|
||||
var csvStringifierFactory = new csv_stringifier_factory_1.CsvStringifierFactory();
|
||||
var csvWriterFactory = new csv_writer_factory_1.CsvWriterFactory(csvStringifierFactory);
|
||||
exports.createArrayCsvStringifier = function (params) {
|
||||
return csvStringifierFactory.createArrayCsvStringifier(params);
|
||||
};
|
||||
exports.createObjectCsvStringifier = function (params) {
|
||||
return csvStringifierFactory.createObjectCsvStringifier(params);
|
||||
};
|
||||
exports.createArrayCsvWriter = function (params) {
|
||||
return csvWriterFactory.createArrayCsvWriter(params);
|
||||
};
|
||||
exports.createObjectCsvWriter = function (params) {
|
||||
return csvWriterFactory.createObjectCsvWriter(params);
|
||||
};
|
||||
//# sourceMappingURL=index.js.map
|
||||
1
node_modules/csv-writer/dist/index.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;AAAA,yEAIuC;AACvC,+DAAuG;AAEvG,IAAM,qBAAqB,GAAG,IAAI,+CAAqB,EAAE,CAAC;AAC1D,IAAM,gBAAgB,GAAG,IAAI,qCAAgB,CAAC,qBAAqB,CAAC,CAAC;AAExD,QAAA,yBAAyB,GAAG,UAAC,MAAiC;IACnE,OAAA,qBAAqB,CAAC,yBAAyB,CAAC,MAAM,CAAC;AAAvD,CAAuD,CAAC;AAEnD,QAAA,0BAA0B,GAAG,UAAC,MAAkC;IACrE,OAAA,qBAAqB,CAAC,0BAA0B,CAAC,MAAM,CAAC;AAAxD,CAAwD,CAAC;AAEpD,QAAA,oBAAoB,GAAG,UAAC,MAA4B;IACzD,OAAA,gBAAgB,CAAC,oBAAoB,CAAC,MAAM,CAAC;AAA7C,CAA6C,CAAC;AAEzC,QAAA,qBAAqB,GAAG,UAAC,MAA6B;IAC3D,OAAA,gBAAgB,CAAC,qBAAqB,CAAC,MAAM,CAAC;AAA9C,CAA8C,CAAC"}
|
||||
20
node_modules/csv-writer/dist/lib/csv-stringifier-factory.js
generated
vendored
Normal file
@ -0,0 +1,20 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var array_1 = require("./csv-stringifiers/array");
|
||||
var field_stringifier_1 = require("./field-stringifier");
|
||||
var object_1 = require("./csv-stringifiers/object");
|
||||
var CsvStringifierFactory = /** @class */ (function () {
|
||||
function CsvStringifierFactory() {
|
||||
}
|
||||
CsvStringifierFactory.prototype.createArrayCsvStringifier = function (params) {
|
||||
var fieldStringifier = field_stringifier_1.createFieldStringifier(params.fieldDelimiter, params.alwaysQuote);
|
||||
return new array_1.ArrayCsvStringifier(fieldStringifier, params.recordDelimiter, params.header);
|
||||
};
|
||||
CsvStringifierFactory.prototype.createObjectCsvStringifier = function (params) {
|
||||
var fieldStringifier = field_stringifier_1.createFieldStringifier(params.fieldDelimiter, params.alwaysQuote);
|
||||
return new object_1.ObjectCsvStringifier(fieldStringifier, params.header, params.recordDelimiter, params.headerIdDelimiter);
|
||||
};
|
||||
return CsvStringifierFactory;
|
||||
}());
|
||||
exports.CsvStringifierFactory = CsvStringifierFactory;
|
||||
//# sourceMappingURL=csv-stringifier-factory.js.map
|
||||
1
node_modules/csv-writer/dist/lib/csv-stringifier-factory.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"csv-stringifier-factory.js","sourceRoot":"","sources":["../../src/lib/csv-stringifier-factory.ts"],"names":[],"mappings":";;AAAA,kDAA6D;AAC7D,yDAA2D;AAC3D,oDAA+D;AAkB/D;IAAA;IAYA,CAAC;IAVG,yDAAyB,GAAzB,UAA0B,MAAiC;QACvD,IAAM,gBAAgB,GAAG,0CAAsB,CAAC,MAAM,CAAC,cAAc,EAAE,MAAM,CAAC,WAAW,CAAC,CAAC;QAC3F,OAAO,IAAI,2BAAmB,CAAC,gBAAgB,EAAE,MAAM,CAAC,eAAe,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC;IAC5F,CAAC;IAED,0DAA0B,GAA1B,UAA2B,MAAkC;QACzD,IAAM,gBAAgB,GAAG,0CAAsB,CAAC,MAAM,CAAC,cAAc,EAAE,MAAM,CAAC,WAAW,CAAC,CAAC;QAC3F,OAAO,IAAI,6BAAoB,CAAC,gBAAgB,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,eAAe,EAAE,MAAM,CAAC,iBAAiB,CAAC,CAAC;IACvH,CAAC;IAEL,4BAAC;AAAD,CAAC,AAZD,IAYC;AAZY,sDAAqB"}
|
||||
38
node_modules/csv-writer/dist/lib/csv-stringifiers/abstract.js
generated
vendored
Normal file
@ -0,0 +1,38 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var DEFAULT_RECORD_DELIMITER = '\n';
|
||||
var VALID_RECORD_DELIMITERS = [DEFAULT_RECORD_DELIMITER, '\r\n'];
|
||||
var CsvStringifier = /** @class */ (function () {
|
||||
function CsvStringifier(fieldStringifier, recordDelimiter) {
|
||||
if (recordDelimiter === void 0) { recordDelimiter = DEFAULT_RECORD_DELIMITER; }
|
||||
this.fieldStringifier = fieldStringifier;
|
||||
this.recordDelimiter = recordDelimiter;
|
||||
_validateRecordDelimiter(recordDelimiter);
|
||||
}
|
||||
CsvStringifier.prototype.getHeaderString = function () {
|
||||
var headerRecord = this.getHeaderRecord();
|
||||
return headerRecord ? this.joinRecords([this.getCsvLine(headerRecord)]) : null;
|
||||
};
|
||||
CsvStringifier.prototype.stringifyRecords = function (records) {
|
||||
var _this = this;
|
||||
var csvLines = Array.from(records, function (record) { return _this.getCsvLine(_this.getRecordAsArray(record)); });
|
||||
return this.joinRecords(csvLines);
|
||||
};
|
||||
CsvStringifier.prototype.getCsvLine = function (record) {
|
||||
var _this = this;
|
||||
return record
|
||||
.map(function (fieldValue) { return _this.fieldStringifier.stringify(fieldValue); })
|
||||
.join(this.fieldStringifier.fieldDelimiter);
|
||||
};
|
||||
CsvStringifier.prototype.joinRecords = function (records) {
|
||||
return records.join(this.recordDelimiter) + this.recordDelimiter;
|
||||
};
|
||||
return CsvStringifier;
|
||||
}());
|
||||
exports.CsvStringifier = CsvStringifier;
|
||||
function _validateRecordDelimiter(delimiter) {
|
||||
if (VALID_RECORD_DELIMITERS.indexOf(delimiter) === -1) {
|
||||
throw new Error("Invalid record delimiter `" + delimiter + "` is specified");
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=abstract.js.map
|
||||
1
node_modules/csv-writer/dist/lib/csv-stringifiers/abstract.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"abstract.js","sourceRoot":"","sources":["../../../src/lib/csv-stringifiers/abstract.ts"],"names":[],"mappings":";;AAGA,IAAM,wBAAwB,GAAG,IAAI,CAAC;AACtC,IAAM,uBAAuB,GAAG,CAAC,wBAAwB,EAAE,MAAM,CAAC,CAAC;AAEnE;IAEI,wBAA6B,gBAAkC,EAClC,eAA0C;QAA1C,gCAAA,EAAA,0CAA0C;QAD1C,qBAAgB,GAAhB,gBAAgB,CAAkB;QAClC,oBAAe,GAAf,eAAe,CAA2B;QACnE,wBAAwB,CAAC,eAAe,CAAC,CAAC;IAC9C,CAAC;IAED,wCAAe,GAAf;QACI,IAAM,YAAY,GAAG,IAAI,CAAC,eAAe,EAAE,CAAC;QAC5C,OAAO,YAAY,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;IACnF,CAAC;IAED,yCAAgB,GAAhB,UAAiB,OAAkC;QAAnD,iBAGC;QAFG,IAAM,QAAQ,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,UAAA,MAAM,IAAI,OAAA,KAAI,CAAC,UAAU,CAAC,KAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAC,EAA9C,CAA8C,CAAC,CAAC;QAC/F,OAAO,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC;IACtC,CAAC;IAMO,mCAAU,GAAlB,UAAmB,MAAe;QAAlC,iBAIC;QAHG,OAAO,MAAM;aACR,GAAG,CAAC,UAAA,UAAU,IAAI,OAAA,KAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC,UAAU,CAAC,EAA3C,CAA2C,CAAC;aAC9D,IAAI,CAAC,IAAI,CAAC,gBAAgB,CAAC,cAAc,CAAC,CAAC;IACpD,CAAC;IAEO,oCAAW,GAAnB,UAAoB,OAAiB;QACjC,OAAO,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,eAAe,CAAC,GAAG,IAAI,CAAC,eAAe,CAAC;IACrE,CAAC;IACL,qBAAC;AAAD,CAAC,AA9BD,IA8BC;AA9BqB,wCAAc;AAgCpC,SAAS,wBAAwB,CAAC,SAAiB;IAC/C,IAAI,uBAAuB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,EAAE;QACnD,MAAM,IAAI,KAAK,CAAC,+BAA8B,SAAS,mBAAiB,CAAC,CAAC;KAC7E;AACL,CAAC"}
|
||||
33
node_modules/csv-writer/dist/lib/csv-stringifiers/array.js
generated
vendored
Normal file
@ -0,0 +1,33 @@
|
||||
"use strict";
|
||||
var __extends = (this && this.__extends) || (function () {
|
||||
var extendStatics = function (d, b) {
|
||||
extendStatics = Object.setPrototypeOf ||
|
||||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
|
||||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
|
||||
return extendStatics(d, b);
|
||||
};
|
||||
return function (d, b) {
|
||||
extendStatics(d, b);
|
||||
function __() { this.constructor = d; }
|
||||
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var abstract_1 = require("./abstract");
|
||||
var ArrayCsvStringifier = /** @class */ (function (_super) {
|
||||
__extends(ArrayCsvStringifier, _super);
|
||||
function ArrayCsvStringifier(fieldStringifier, recordDelimiter, header) {
|
||||
var _this = _super.call(this, fieldStringifier, recordDelimiter) || this;
|
||||
_this.header = header;
|
||||
return _this;
|
||||
}
|
||||
ArrayCsvStringifier.prototype.getHeaderRecord = function () {
|
||||
return this.header;
|
||||
};
|
||||
ArrayCsvStringifier.prototype.getRecordAsArray = function (record) {
|
||||
return record;
|
||||
};
|
||||
return ArrayCsvStringifier;
|
||||
}(abstract_1.CsvStringifier));
|
||||
exports.ArrayCsvStringifier = ArrayCsvStringifier;
|
||||
//# sourceMappingURL=array.js.map
|
||||
Some files were not shown because too many files have changed in this diff.