Compare commits

...

No commits in common. "main" and "test" have entirely different histories.
main ... test

14439 changed files with 0 additions and 2828712 deletions


@@ -1,2 +0,0 @@
# API


@@ -1,5 +0,0 @@
PORT=3000
JWT_KEY= c91bd663809d5f1faeba294d4241cd9a!@#$%^&*()spot
DOMAIN=https://api.medcify.app
STORE_URL=https://medcify.app
IMAGE_URL=https://api.medcify.app/uploads/


@@ -1,32 +0,0 @@
const { verify } = require('jsonwebtoken');

module.exports = {
  checkToken: (req, res, next) => {
    let token = req.get('authorization');
    if (token) {
      // Strip the "Bearer " prefix before verifying the JWT
      token = token.slice(7);
      verify(token, process.env.JWT_KEY, (err, decoded) => {
        if (err) {
          res.json({
            "code": "2",
            "message": "This mobile number is registered on another device, please check"
          });
        } else {
          next();
        }
      });
    } else {
      res.json({
        "success": "0",
        "message": "Unauthorized user credentials"
      });
    }
  }
};


@@ -1,23 +0,0 @@
{
"development": {
"username": "bubbles",
"password": "P123!admin",
"database": "medcify",
"host": "localhost",
"dialect": "mysql"
},
"test": {
"username": "bubbles",
"password": "P123!admin",
"database": "medcify",
"host": "localhost",
"dialect": "mysql"
},
"production": {
"username": "bubbles",
"password": "P123!admin",
"database": "medcify",
"host": "localhost",
"dialect": "mysql"
}
}


@@ -1,17 +0,0 @@
var admin = require("firebase-admin");
var serviceAccount = require("../helper/medcifyapp-firebase-adminsdk-fil7a-46796e6995.json");

admin.initializeApp({
  credential: admin.credential.cert(serviceAccount),
  databaseURL: ""
});

module.exports.admin = admin;


@@ -1,33 +0,0 @@
const { verify } = require('jsonwebtoken');
module.exports = {
authVerify: (req, res, next) => {
let token = req.get('authorization');
if (token) {
token = token.slice(7);
verify(token, process.env.JWT_KEY, (err, decoded) => {
if (err) {
res.json({
"Code": "2",
"Message": "Unauthorized Credentials"
})
}
else {
next();
}
});
}
else {
res.json({
"success": "0",
"message": "Unauthorized user Credentials"
})
}
}
}


@@ -1,22 +0,0 @@
const bcrypt = require('bcryptjs');
const saltRound = 10;

// Generate a bcrypt hash for a plain-text password
const hashGenerate = async (plainPassword) => {
  const salt = await bcrypt.genSalt(saltRound);
  const hash = await bcrypt.hash(plainPassword, salt);
  return hash;
};

// Compare a plain-text password against a stored bcrypt hash
// const existingUser = await models.Employee.findOne({ where: { email: req.body.password } });
const hashValidator = async (plainPassword, oldpwd) => {
  const result = await bcrypt.compare(plainPassword, oldpwd);
  return result;
};

module.exports.hashGenerate = hashGenerate;
module.exports.hashValidator = hashValidator;


@@ -1,12 +0,0 @@
{
"type": "service_account",
"project_id": "medcifyapp",
"private_key_id": "46796e699534de5fdd79cf571cb91aa7eada55c4",
"private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQDO+shl0/1huiSz\nhIzrYn1b+9O0RJnPMTI92dXLYlS02n598vwWYzzuTg0QO/wG18khXesLyiT5OJL1\nU84o0VrafOWsD34euNPLbOfI1xKzPhnlnPr4ejCwhnwCq1ap9F3QlJ7CQmwMl2Sf\nCg6PXHhc0ui9dGtvwtnX3VOZmBbthaBfZp3at1Kca1dMRZzYy8RcR+s/DNY8xEzq\nOT0/o9HQz0Us1fxgyY++yzhgywnK0W//AB9yk5o22eeJ41f811iodaNj4ATJ/nxx\n8kyD/VwWbp21EOV/OJngzoEQcjA91gXF9fcnmQoPzs50ULH1pYNohGD9JGiXmw8J\ncl2jMmN7AgMBAAECggEANM71bE2fG10T2h2amr/hPiPNSvQpzhw+Nluq2Z54AECy\n/02MleWx+Rc+7AJx9gj+Tx4YR0THFDMkA6XjNrTbml09eB3nzszXdNKPfMWrBsJ0\ndwoLWnRKke9uGnw+u1QWz43rx6KM0NWbnZxCRL+SEhTZvuTCOJydWh8lIk4Uo3r7\ntkd702yFeCc/UZoe8YztVX3z+3h3T2AeZf3ymN19HNIDt7JL3M9nn+kD+kezh4J9\niY6N1zka+dYjqiCIzDmC7nMmvU4zt6E3zkbUsxQilSmxsZpGa6nIQwJlhR1zVyPu\nMYSKOiPwguxyqs61WcvZ3iSbG55T2/gXQVqtez1KgQKBgQD4irXnj3rUKOcm36Wk\npPHGX+4W53/MzWHSz3waav8xCgBh3pNuqS10aimlF3X0dg8Q3Bxdw7H6G1meWu4p\nvzN0ye8ayEfJ7WSas0W+nQ4jPm2riH5xr/FSOR1Hk66DcfyXdWj8NbDwHxkOLAzZ\nu05A0EvIdAnIqQfCGK1nvtWyQQKBgQDVMMrztD4uAG9MiTO78nLlUJUmF2UQyU9A\nPvUKG5LKyGIWECWQInRugWdCy/pvaZSnrL2FbRBo9hyzjckO38TxNHW0dtYFZsaQ\nt1yDdkUh1ADm+lcUWisgNEJq1tlwX6YXUNxps/mLe2OuEx4x2G4EHT5BuVL5TsT6\ndU9R/7GuuwKBgQDb588e/SJOiYpMKOGklZSeUs6UUWwQR0Bgh9hkf3sfZpwD/C0y\nYlmngS8rtWBWVgbqgfZPOBFHG9/lbsr/AP+EPA9lAQQTtAmpnyj8iHWz1U6v0Osf\np+67M3GmnRJxlu9JC0dCsDbHnpxKn8sW8tVAH4yN5a2qb3Q/1hnHi2AEgQKBgQCH\nqzdcg3WWfLxXbnC3yICJohBoXzb3ycRfVDFIL2fpGEAjXj8pTGPf9wWponlK8Vui\noPEyucdZpFV7hl1O0+RWw1WsdHuakF8ItUdweZFhB7NTSjikQQV0xw0FRJi3RIdE\nCzdZJSlHNEyo+I2clujcQWffF3rQIOqFoKXLvvZqBQKBgQDjU67ZQ3evYTi5wRoG\nUkyenl6ncrgN3mIXUAxrhg78gf04h3lwEHlI3a9+XN6iZRfU7RCTrOE2ZoILEGA0\nuUPhU8GDq1eoJoYsEo6ZVBIr8O5glm1iYg2Qo94UhhEU1Bih/OFJff0jGlp0x9df\n3ZuSykPDSoqw5H6GW6dst6l9+w==\n-----END PRIVATE KEY-----\n",
"client_email": "firebase-adminsdk-fil7a@medcifyapp.iam.gserviceaccount.com",
"client_id": "101809883290789293870",
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://oauth2.googleapis.com/token",
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/firebase-adminsdk-fil7a%40medcifyapp.iam.gserviceaccount.com"
}


@@ -1,10 +0,0 @@
//-------- TOKEN GENERATOR --------//
const jwt = require('jsonwebtoken');

// Sign a JWT carrying the username, using the shared secret from the environment
const tokenGenerator = (username) => {
  const token = jwt.sign({ username }, process.env.JWT_KEY);
  return token;
};

module.exports.tokenGenerator = tokenGenerator;


@@ -1,12 +0,0 @@
const express = require('express');
const routes = require('./routes/api');
const dotenv = require('dotenv');
const QR = require("qrcode");

const app = express();

// Load environment variables before the routes start handling requests
dotenv.config();
app.use('/', routes);

app.listen(process.env.PORT, () => console.log('Server Up Running ' + process.env.PORT));


@@ -1,70 +0,0 @@
'use strict';
module.exports = {
async up(queryInterface, Sequelize) {
await queryInterface.createTable('users', {
id: {
allowNull: false,
autoIncrement: true,
primaryKey: true,
type: Sequelize.INTEGER
},
userType: {
type: Sequelize.INTEGER
},
name: {
type: Sequelize.STRING
},
image:{
type: Sequelize.TEXT
},
referralDoctor: {
type: Sequelize.STRING
},
email: {
type: Sequelize.STRING
},
contactNumber: {
type: Sequelize.STRING
},
whatsAppNumber: {
type: Sequelize.STRING
},
address1: {
type: Sequelize.STRING
},
address2: {
type: Sequelize.STRING
},
pincode: {
type: Sequelize.INTEGER
},
location: {
type: Sequelize.STRING
},
username: {
type: Sequelize.STRING
},
deviceId: {
type: Sequelize.STRING
},
password: {
type: Sequelize.STRING
},
status: {
type: Sequelize.INTEGER
},
createdAt: {
allowNull: false,
type: Sequelize.DATE
},
updatedAt: {
allowNull: false,
type: Sequelize.DATE
}
});
},
async down(queryInterface, Sequelize) {
await queryInterface.dropTable('users');
}
};


@@ -1,64 +0,0 @@
'use strict';
module.exports = {
async up(queryInterface, Sequelize) {
await queryInterface.createTable('medicines', {
id: {
allowNull: false,
autoIncrement: true,
primaryKey: true,
type: Sequelize.INTEGER
},
name: {
type: Sequelize.STRING
},
companyId: {
type: Sequelize.INTEGER
},
storeId: {
type: Sequelize.INTEGER
},
amount: {
type: Sequelize.INTEGER
},
quantity:{
type: Sequelize.STRING
},
discount: {
type: Sequelize.INTEGER
},
image: {
type: Sequelize.STRING
},
howWorks: {
type: Sequelize.STRING
},
directionOfUse: {
type: Sequelize.STRING
},
prescription:{
type: Sequelize.INTEGER
},
status: {
type: Sequelize.INTEGER
},
createdBy: {
type: Sequelize.INTEGER
},
updatedBy: {
type: Sequelize.INTEGER
},
createdAt: {
allowNull: false,
type: Sequelize.DATE
},
updatedAt: {
allowNull: false,
type: Sequelize.DATE
}
});
},
async down(queryInterface, Sequelize) {
await queryInterface.dropTable('medicines');
}
};


@@ -1,36 +0,0 @@
'use strict';
module.exports = {
async up(queryInterface, Sequelize) {
await queryInterface.createTable('companies', {
id: {
allowNull: false,
autoIncrement: true,
primaryKey: true,
type: Sequelize.INTEGER
},
name: {
type: Sequelize.STRING,
},
status: {
type: Sequelize.INTEGER
},
createdBy: {
type: Sequelize.INTEGER
},
updatedBy: {
type: Sequelize.INTEGER
},
createdAt: {
allowNull: false,
type: Sequelize.DATE
},
updatedAt: {
allowNull: false,
type: Sequelize.DATE
}
});
},
async down(queryInterface, Sequelize) {
await queryInterface.dropTable('companies');
}
};


@@ -1,78 +0,0 @@
'use strict';
module.exports = {
async up(queryInterface, Sequelize) {
await queryInterface.createTable('subscriptions', {
id: {
allowNull: false,
autoIncrement: true,
primaryKey: true,
type: Sequelize.INTEGER
},
userId:{
type: Sequelize.INTEGER
},
storeId: {
type: Sequelize.INTEGER
},
billNumber: {
type: Sequelize.STRING
},
planId: {
type: Sequelize.INTEGER
},
amount: {
type: Sequelize.STRING
},
validity: {
type: Sequelize.STRING
},
paymentType: {
type: Sequelize.INTEGER
},
startDate: {
type: Sequelize.DATE
},
endDate: {
type: Sequelize.DATE
},
webhookResponse: {
type: Sequelize.TEXT
},
razorpayOrderId: {
type: Sequelize.TEXT
},
razorpayPaymentId: {
type: Sequelize.TEXT
},
razorpaySignature: {
type: Sequelize.TEXT
},
paymentStatus: {
type: Sequelize.TEXT
},
activeStatus: {
type: Sequelize.STRING
},
status: {
type: Sequelize.INTEGER
},
createdBy: {
type: Sequelize.INTEGER
},
updatedBy: {
type: Sequelize.INTEGER
},
createdAt: {
allowNull: false,
type: Sequelize.DATE
},
updatedAt: {
allowNull: false,
type: Sequelize.DATE
}
});
},
async down(queryInterface, Sequelize) {
await queryInterface.dropTable('subscriptions');
}
};


@@ -1,36 +0,0 @@
'use strict';
module.exports = {
async up(queryInterface, Sequelize) {
await queryInterface.createTable('roles', {
id: {
allowNull: false,
autoIncrement: true,
primaryKey: true,
type: Sequelize.INTEGER
},
name: {
type: Sequelize.STRING
},
status: {
type: Sequelize.INTEGER
},
createdBy: {
type: Sequelize.INTEGER
},
updatedBy: {
type: Sequelize.INTEGER
},
createdAt: {
allowNull: false,
type: Sequelize.DATE
},
updatedAt: {
allowNull: false,
type: Sequelize.DATE
}
});
},
async down(queryInterface, Sequelize) {
await queryInterface.dropTable('roles');
}
};


@@ -1,142 +0,0 @@
'use strict';
module.exports = {
async up(queryInterface, Sequelize) {
await queryInterface.createTable('stores', {
id: {
allowNull: false,
autoIncrement: true,
primaryKey: true,
type: Sequelize.INTEGER
},
storeName: {
type: Sequelize.STRING
},
storeUrl: {
type: Sequelize.STRING
},
ownerName: {
type: Sequelize.STRING
},
contactNumber: {
type: Sequelize.STRING
},
deviceId: {
type: Sequelize.TEXT
},
whatsAppNumber: {
type: Sequelize.STRING
},
address1: {
type: Sequelize.STRING
},
address2: {
type: Sequelize.STRING
},
pincode: {
type: Sequelize.INTEGER
},
enablePrescription: {
type: Sequelize.INTEGER
},
location: {
type: Sequelize.STRING
},
storeImage: {
type: Sequelize.STRING
},
storeDoc: {
type: Sequelize.STRING
},
qrCode: {
type: Sequelize.TEXT
},
openingTime: {
type: Sequelize.TIME
},
closingTime: {
type: Sequelize.TIME
},
features: {
type: Sequelize.STRING
},
disclaimer: {
type: Sequelize.STRING
},
storeType: {
type: Sequelize.INTEGER
},
otp: {
type: Sequelize.INTEGER
},
verifyOtp: {
type: Sequelize.INTEGER
},
storeStatus: {
type: Sequelize.INTEGER
},
returnPolicy: {
type: Sequelize.STRING
},
pickup: {
type: Sequelize.INTEGER
},
wideDiscount: {
type: Sequelize.STRING
},
wideDiscountType: {
type: Sequelize.INTEGER
},
cashondelivery: {
type: Sequelize.INTEGER
},
upi: {
type: Sequelize.STRING
},
planId: {
type: Sequelize.INTEGER
},
planType: {
type: Sequelize.INTEGER
},
planValidity: {
type: Sequelize.INTEGER
},
planStatus: {
type: Sequelize.INTEGER
},
trialStatus: {
type: Sequelize.INTEGER
},
startDate: {
type: Sequelize.DATE
},
endDate: {
type: Sequelize.DATE
},
status: {
type: Sequelize.INTEGER
},
createdBy: {
type: Sequelize.INTEGER
},
updatedBy: {
type: Sequelize.INTEGER
},
createdAt: {
allowNull: false,
type: Sequelize.DATE
},
updatedAt: {
allowNull: false,
type: Sequelize.DATE
},
upiQr: {
allowNull: false,
type: Sequelize.STRING
}
});
},
async down(queryInterface, Sequelize) {
await queryInterface.dropTable('stores');
}
};


@@ -1,48 +0,0 @@
'use strict';
module.exports = {
async up(queryInterface, Sequelize) {
await queryInterface.createTable('plans', {
id: {
allowNull: false,
autoIncrement: true,
primaryKey: true,
type: Sequelize.INTEGER
},
name: {
type: Sequelize.STRING
},
type: {
type: Sequelize.STRING
},
planFor:{
type: Sequelize.INTEGER
},
amount: {
type: Sequelize.DOUBLE
},
validity: {
type: Sequelize.INTEGER
},
status: {
type: Sequelize.INTEGER
},
createdBy: {
type: Sequelize.INTEGER
},
updatedBy: {
type: Sequelize.INTEGER
},
createdAt: {
allowNull: false,
type: Sequelize.DATE
},
updatedAt: {
allowNull: false,
type: Sequelize.DATE
}
});
},
async down(queryInterface, Sequelize) {
await queryInterface.dropTable('plans');
}
};


@@ -1,45 +0,0 @@
'use strict';
module.exports = {
async up(queryInterface, Sequelize) {
await queryInterface.createTable('prescriptions', {
id: {
allowNull: false,
autoIncrement: true,
primaryKey: true,
type: Sequelize.INTEGER
},
storeId: {
type: Sequelize.INTEGER
},
prescription: {
type: Sequelize.STRING
},
status: {
type: Sequelize.INTEGER
},
date: {
type: Sequelize.DATE
},
userId: {
type: Sequelize.STRING
},
createdBy: {
type: Sequelize.INTEGER
},
updatedBy: {
type: Sequelize.INTEGER
},
createdAt: {
allowNull: false,
type: Sequelize.DATE
},
updatedAt: {
allowNull: false,
type: Sequelize.DATE
}
});
},
async down(queryInterface, Sequelize) {
await queryInterface.dropTable('prescriptions');
}
};


@@ -1,36 +0,0 @@
'use strict';
module.exports = {
async up(queryInterface, Sequelize) {
await queryInterface.createTable('orderProcesses', {
id: {
allowNull: false,
autoIncrement: true,
primaryKey: true,
type: Sequelize.INTEGER
},
name: {
type: Sequelize.STRING
},
status: {
type: Sequelize.INTEGER
},
createdBy: {
type: Sequelize.INTEGER
},
updatedBy: {
type: Sequelize.INTEGER
},
createdAt: {
allowNull: false,
type: Sequelize.DATE
},
updatedAt: {
allowNull: false,
type: Sequelize.DATE
}
});
},
async down(queryInterface, Sequelize) {
await queryInterface.dropTable('orderProcesses');
}
};


@@ -1,42 +0,0 @@
'use strict';
module.exports = {
async up(queryInterface, Sequelize) {
await queryInterface.createTable('orderProcessHistories', {
id: {
allowNull: false,
autoIncrement: true,
primaryKey: true,
type: Sequelize.INTEGER
},
orderId: {
type: Sequelize.INTEGER
},
statusDate: {
type: Sequelize.DATE
},
processStatus: {
type: Sequelize.STRING
},
status: {
type: Sequelize.INTEGER
},
createdBy: {
type: Sequelize.INTEGER
},
updatedBy: {
type: Sequelize.INTEGER
},
createdAt: {
allowNull: false,
type: Sequelize.DATE
},
updatedAt: {
allowNull: false,
type: Sequelize.DATE
}
});
},
async down(queryInterface, Sequelize) {
await queryInterface.dropTable('orderProcessHistories');
}
};


@@ -1,60 +0,0 @@
'use strict';
module.exports = {
async up(queryInterface, Sequelize) {
await queryInterface.createTable('carts', {
id: {
allowNull: false,
autoIncrement: true,
primaryKey: true,
type: Sequelize.INTEGER
},
userId: {
type: Sequelize.STRING
},
storeId: {
type: Sequelize.INTEGER
},
medicineId: {
type: Sequelize.INTEGER
},
medAmt: {
type: Sequelize.DOUBLE
},
pres_required: {
type: Sequelize.INTEGER
},
quantity: {
type: Sequelize.INTEGER
},
amount: {
type: Sequelize.STRING
},
discount: {
type: Sequelize.STRING
},
total: {
type: Sequelize.STRING
},
status: {
type: Sequelize.INTEGER
},
createdBy: {
type: Sequelize.INTEGER
},
updatedBy: {
type: Sequelize.INTEGER
},
createdAt: {
allowNull: false,
type: Sequelize.DATE
},
updatedAt: {
allowNull: false,
type: Sequelize.DATE
}
});
},
async down(queryInterface, Sequelize) {
await queryInterface.dropTable('carts');
}
};


@@ -1,71 +0,0 @@
'use strict';
module.exports = {
async up(queryInterface, Sequelize) {
await queryInterface.createTable('orders', {
id: {
allowNull: false,
autoIncrement: true,
primaryKey: true,
type: Sequelize.INTEGER
},
date: {
type: Sequelize.DATE
},
userId: {
type: Sequelize.INTEGER
},
storeId: {
type: Sequelize.INTEGER
},
prescriptionId: {
type: Sequelize.INTEGER
},
deviceId: {
allowNull: false,
type: Sequelize.STRING
},
quantity:{
type: Sequelize.INTEGER
},
amount: {
type: Sequelize.DOUBLE
},
discount: {
type: Sequelize.DOUBLE
},
deliveryfee: {
type: Sequelize.DOUBLE
},
total: {
type: Sequelize.DOUBLE
},
paymentMethod: {
type: Sequelize.INTEGER // 1 - pod, 2 - upi
},
orderProcessId: {
type: Sequelize.INTEGER
},
status: {
type: Sequelize.INTEGER
},
createdBy: {
type: Sequelize.INTEGER
},
updatedBy: {
type: Sequelize.INTEGER
},
createdAt: {
allowNull: false,
type: Sequelize.DATE
},
updatedAt: {
allowNull: false,
type: Sequelize.DATE
}
});
},
async down(queryInterface, Sequelize) {
await queryInterface.dropTable('orders');
}
};


@@ -1,54 +0,0 @@
'use strict';
module.exports = {
async up(queryInterface, Sequelize) {
await queryInterface.createTable('orderDetails', {
id: {
allowNull: false,
autoIncrement: true,
primaryKey: true,
type: Sequelize.INTEGER
},
orderId: {
type: Sequelize.INTEGER
},
medicineId: {
type: Sequelize.INTEGER
},
quantity: {
type: Sequelize.INTEGER
},
amount: {
type: Sequelize.DOUBLE
},
curdiscount: {
type: Sequelize.DOUBLE
},
discount: {
type: Sequelize.DOUBLE
},
total: {
type: Sequelize.DOUBLE
},
status: {
type: Sequelize.INTEGER
},
createdBy: {
type: Sequelize.INTEGER
},
updatedBy: {
type: Sequelize.INTEGER
},
createdAt: {
allowNull: false,
type: Sequelize.DATE
},
updatedAt: {
allowNull: false,
type: Sequelize.DATE
}
});
},
async down(queryInterface, Sequelize) {
await queryInterface.dropTable('orderDetails');
}
};


@@ -1,39 +0,0 @@
'use strict';
module.exports = {
async up(queryInterface, Sequelize) {
await queryInterface.createTable('notifications', {
id: {
allowNull: false,
autoIncrement: true,
primaryKey: true,
type: Sequelize.INTEGER
},
moduleName: {
type: Sequelize.STRING
},
messageDetail: {
type: Sequelize.STRING
},
status: {
type: Sequelize.INTEGER
},
createdBy: {
type: Sequelize.INTEGER
},
updatedBy: {
type: Sequelize.INTEGER
},
createdAt: {
allowNull: false,
type: Sequelize.DATE
},
updatedAt: {
allowNull: false,
type: Sequelize.DATE
}
});
},
async down(queryInterface, Sequelize) {
await queryInterface.dropTable('notifications');
}
};


@@ -1,39 +0,0 @@
'use strict';
module.exports = {
async up(queryInterface, Sequelize) {
await queryInterface.createTable('medicineuses', {
id: {
allowNull: false,
autoIncrement: true,
primaryKey: true,
type: Sequelize.INTEGER
},
name: {
type: Sequelize.STRING
},
medicineId: {
type: Sequelize.INTEGER
},
status: {
type: Sequelize.INTEGER
},
createdBy: {
type: Sequelize.INTEGER
},
updatedBy: {
type: Sequelize.INTEGER
},
createdAt: {
allowNull: false,
type: Sequelize.DATE
},
updatedAt: {
allowNull: false,
type: Sequelize.DATE
}
});
},
async down(queryInterface, Sequelize) {
await queryInterface.dropTable('medicineuses');
}
};


@@ -1,39 +0,0 @@
'use strict';
module.exports = {
async up(queryInterface, Sequelize) {
await queryInterface.createTable('medicinesideeffects', {
id: {
allowNull: false,
autoIncrement: true,
primaryKey: true,
type: Sequelize.INTEGER
},
name: {
type: Sequelize.STRING
},
medicineId: {
type: Sequelize.INTEGER
},
status: {
type: Sequelize.INTEGER
},
createdBy: {
type: Sequelize.INTEGER
},
updatedBy: {
type: Sequelize.INTEGER
},
createdAt: {
allowNull: false,
type: Sequelize.DATE
},
updatedAt: {
allowNull: false,
type: Sequelize.DATE
}
});
},
async down(queryInterface, Sequelize) {
await queryInterface.dropTable('medicinesideeffects');
}
};


@@ -1,39 +0,0 @@
'use strict';
module.exports = {
async up(queryInterface, Sequelize) {
await queryInterface.createTable('storevisitors', {
id: {
allowNull: false,
autoIncrement: true,
primaryKey: true,
type: Sequelize.INTEGER
},
storeid: {
type: Sequelize.INTEGER
},
count: {
type: Sequelize.INTEGER
},
status: {
type: Sequelize.INTEGER
},
createdBy: {
type: Sequelize.INTEGER
},
updatedBy: {
type: Sequelize.INTEGER
},
createdAt: {
allowNull: false,
type: Sequelize.DATE
},
updatedAt: {
allowNull: false,
type: Sequelize.DATE
}
});
},
async down(queryInterface, Sequelize) {
await queryInterface.dropTable('storevisitors');
}
};


@@ -1,36 +0,0 @@
'use strict';
module.exports = {
async up(queryInterface, Sequelize) {
await queryInterface.createTable('banners', {
id: {
allowNull: false,
autoIncrement: true,
primaryKey: true,
type: Sequelize.INTEGER
},
image: {
type: Sequelize.STRING
},
status: {
type: Sequelize.INTEGER
},
createdBy: {
type: Sequelize.INTEGER
},
updatedBy: {
type: Sequelize.INTEGER
},
createdAt: {
allowNull: false,
type: Sequelize.DATE
},
updatedAt: {
allowNull: false,
type: Sequelize.DATE
}
});
},
async down(queryInterface, Sequelize) {
await queryInterface.dropTable('banners');
}
};


@@ -1,45 +0,0 @@
'use strict';
module.exports = {
async up(queryInterface, Sequelize) {
await queryInterface.createTable('deliveryPincodes', {
id: {
allowNull: false,
autoIncrement: true,
primaryKey: true,
type: Sequelize.INTEGER
},
storeId: {
type: Sequelize.INTEGER
},
pincode: {
type: Sequelize.INTEGER
},
deliveryFee: {
type: Sequelize.STRING
},
free_above: {
type: Sequelize.STRING
},
status: {
type: Sequelize.INTEGER
},
createdBy: {
type: Sequelize.INTEGER
},
updatedBy: {
type: Sequelize.INTEGER
},
createdAt: {
allowNull: false,
type: Sequelize.DATE
},
updatedAt: {
allowNull: false,
type: Sequelize.DATE
}
});
},
async down(queryInterface, Sequelize) {
await queryInterface.dropTable('deliveryPincodes');
}
};


@@ -1,39 +0,0 @@
'use strict';
module.exports = {
async up(queryInterface, Sequelize) {
await queryInterface.createTable('storeTimes', {
id: {
allowNull: false,
autoIncrement: true,
primaryKey: true,
type: Sequelize.INTEGER
},
storeId: {
type: Sequelize.INTEGER
},
dayName: {
type: Sequelize.STRING
},
startTime: {
type: Sequelize.TIME
},
endTime: {
type: Sequelize.TIME
},
status: {
type: Sequelize.INTEGER
},
createdAt: {
allowNull: false,
type: Sequelize.DATE
},
updatedAt: {
allowNull: false,
type: Sequelize.DATE
}
});
},
async down(queryInterface, Sequelize) {
await queryInterface.dropTable('storeTimes');
}
};


@@ -1,39 +0,0 @@
'use strict';
module.exports = {
async up(queryInterface, Sequelize) {
await queryInterface.createTable('orderNotifications', {
id: {
allowNull: false,
autoIncrement: true,
primaryKey: true,
type: Sequelize.INTEGER
},
storeId: {
type: Sequelize.INTEGER
},
messageDetail: {
type: Sequelize.TEXT
},
status: {
type: Sequelize.INTEGER
},
createdBy: {
type: Sequelize.INTEGER
},
updatedBy: {
type: Sequelize.INTEGER
},
createdAt: {
allowNull: false,
type: Sequelize.DATE
},
updatedAt: {
allowNull: false,
type: Sequelize.DATE
}
});
},
async down(queryInterface, Sequelize) {
await queryInterface.dropTable('orderNotifications');
}
};


@@ -1,39 +0,0 @@
'use strict';
module.exports = {
async up(queryInterface, Sequelize) {
await queryInterface.createTable('settings', {
id: {
allowNull: false,
autoIncrement: true,
primaryKey: true,
type: Sequelize.INTEGER
},
razor_key: {
type: Sequelize.TEXT
},
razor_secretkey: {
type: Sequelize.TEXT
},
hook_url: {
type: Sequelize.TEXT
},
hook_secretkey: {
type: Sequelize.TEXT
},
status: {
type: Sequelize.INTEGER
},
createdAt: {
allowNull: false,
type: Sequelize.DATE
},
updatedAt: {
allowNull: false,
type: Sequelize.DATE
}
});
},
async down(queryInterface, Sequelize) {
await queryInterface.dropTable('settings');
}
};


@@ -1,26 +0,0 @@
'use strict';
const {
Model
} = require('sequelize');
module.exports = (sequelize, DataTypes) => {
class banner extends Model {
/**
* Helper method for defining associations.
* This method is not a part of Sequelize lifecycle.
* The `models/index` file will call this method automatically.
*/
static associate(models) {
// define association here
}
}
banner.init({
image: DataTypes.STRING,
status: DataTypes.INTEGER,
createdBy: DataTypes.INTEGER,
updatedBy: DataTypes.INTEGER
}, {
sequelize,
modelName: 'banner',
});
return banner;
};


@@ -1,35 +0,0 @@
'use strict';
const {
Model
} = require('sequelize');
module.exports = (sequelize, DataTypes) => {
class cart extends Model {
/**
* Helper method for defining associations.
* This method is not a part of Sequelize lifecycle.
* The `models/index` file will call this method automatically.
*/
static associate(models) {
// define association here
models.cart.belongsTo(models.medicines);
}
}
cart.init({
userId: DataTypes.STRING,
storeId: DataTypes.INTEGER,
medicineId: DataTypes.INTEGER,
medAmt: DataTypes.DOUBLE,
pres_required: DataTypes.INTEGER,
quantity: DataTypes.INTEGER,
amount: DataTypes.STRING,
discount: DataTypes.STRING,
total: DataTypes.STRING,
status: DataTypes.INTEGER,
createdBy: DataTypes.INTEGER,
updatedBy: DataTypes.INTEGER
}, {
sequelize,
modelName: 'cart',
});
return cart;
};


@@ -1,27 +0,0 @@
'use strict';
const {
Model
} = require('sequelize');
module.exports = (sequelize, DataTypes) => {
class company extends Model {
/**
* Helper method for defining associations.
* This method is not a part of Sequelize lifecycle.
* The `models/index` file will call this method automatically.
*/
static associate(models) {
// define association here
models.medicines.belongsTo(models.company);
}
}
company.init({
name: DataTypes.STRING,
status: DataTypes.INTEGER,
createdBy: DataTypes.INTEGER,
updatedBy: DataTypes.INTEGER
}, {
sequelize,
modelName: 'company',
});
return company;
};


@@ -1,29 +0,0 @@
'use strict';
const {
Model
} = require('sequelize');
module.exports = (sequelize, DataTypes) => {
class deliveryPincode extends Model {
/**
* Helper method for defining associations.
* This method is not a part of Sequelize lifecycle.
* The `models/index` file will call this method automatically.
*/
static associate(models) {
// define association here
}
}
deliveryPincode.init({
storeId: DataTypes.INTEGER,
pincode: DataTypes.INTEGER,
deliveryFee: DataTypes.STRING,
free_above: DataTypes.STRING,
status: DataTypes.INTEGER,
createdBy: DataTypes.INTEGER,
updatedBy: DataTypes.INTEGER
}, {
sequelize,
modelName: 'deliveryPincode',
});
return deliveryPincode;
};


@@ -1,44 +0,0 @@
'use strict';
const fs = require('fs');
const path = require('path');
const Sequelize = require('sequelize');
const basename = path.basename(__filename);
const env = process.env.NODE_ENV || 'development';
const config = require(__dirname + '/../config/config.json')[env];
const db = {};
let sequelize;
if (config.use_env_variable) {
sequelize = new Sequelize(process.env[config.use_env_variable], config);
} else {
sequelize = new Sequelize(config.database, config.username, config.password, config);
}
fs
.readdirSync(__dirname)
.filter(file => {
return (file.indexOf('.') !== 0) && (file !== basename) && (file.slice(-3) === '.js');
})
.forEach(file => {
const model = require(path.join(__dirname, file))(sequelize, Sequelize.DataTypes);
db[model.name] = model;
});
Object.keys(db).forEach(modelName => {
if (db[modelName].associate) {
db[modelName].associate(db);
}
});
db.sequelize = sequelize;
db.Sequelize = Sequelize;
// authenticate() returns a promise, so connection failures must be handled with .catch
sequelize.authenticate()
.then(() => console.log('Connection has been established successfully.'))
.catch(error => console.error('Unable to connect to the database:', error));
module.exports = db;


@@ -1,45 +0,0 @@
'use strict';
const {
Model
} = require('sequelize');
module.exports = (sequelize, DataTypes) => {
class medicines extends Model {
/**
* Helper method for defining associations.
* This method is not a part of Sequelize lifecycle.
* The `models/index` file will call this method automatically.
*/
static associate(models)
{
// Joint Query
models.company.hasOne(models.medicines);
models.store.hasOne(models.medicines);
models.medicines.hasOne(models.cart);
models.medicines.hasOne(models.orderDetails);
//For Medicine Table
models.medicineuses.belongsTo(models.medicines);
models.medicinesideeffects.belongsTo(models.medicines);
models.company.hasMany(models.medicines);
}
}
medicines.init({
name: DataTypes.STRING,
companyId: DataTypes.INTEGER,
storeId: DataTypes.INTEGER,
quantity: DataTypes.STRING,
amount: DataTypes.INTEGER,
discount: DataTypes.INTEGER,
image: DataTypes.STRING,
howWorks: DataTypes.STRING,
directionOfUse: DataTypes.STRING,
prescription: DataTypes.INTEGER,
status: DataTypes.INTEGER,
createdBy: DataTypes.INTEGER,
updatedBy: DataTypes.INTEGER
}, {
sequelize,
modelName: 'medicines',
});
return medicines;
};


@@ -1,27 +0,0 @@
'use strict';
const {
Model
} = require('sequelize');
module.exports = (sequelize, DataTypes) => {
class medicinesideeffects extends Model {
/**
* Helper method for defining associations.
* This method is not a part of Sequelize lifecycle.
* The `models/index` file will call this method automatically.
*/
static associate(models) {
models.medicines.hasMany(models.medicinesideeffects);
}
}
medicinesideeffects.init({
name: DataTypes.STRING,
medicineId: DataTypes.INTEGER,
status: DataTypes.INTEGER,
createdBy: DataTypes.INTEGER,
updatedBy: DataTypes.INTEGER
}, {
sequelize,
modelName: 'medicinesideeffects',
});
return medicinesideeffects;
};


@@ -1,28 +0,0 @@
'use strict';
const {
Model
} = require('sequelize');
module.exports = (sequelize, DataTypes) => {
class medicineuses extends Model {
/**
* Helper method for defining associations.
* This method is not a part of Sequelize lifecycle.
* The `models/index` file will call this method automatically.
*/
static associate(models) {
models.medicines.hasMany(models.medicineuses);
}
}
medicineuses.init({
name: DataTypes.STRING,
medicineId: DataTypes.INTEGER,
status: DataTypes.INTEGER,
createdBy: DataTypes.INTEGER,
updatedBy: DataTypes.INTEGER
}, {
sequelize,
modelName: 'medicineuses',
});
return medicineuses;
};


@@ -1,27 +0,0 @@
'use strict';
const {
Model
} = require('sequelize');
module.exports = (sequelize, DataTypes) => {
class notification extends Model {
/**
* Helper method for defining associations.
* This method is not a part of Sequelize lifecycle.
* The `models/index` file will call this method automatically.
*/
static associate(models) {
// define association here
}
}
notification.init({
moduleName: DataTypes.STRING,
messageDetail: DataTypes.STRING,
status: DataTypes.INTEGER,
createdBy: DataTypes.INTEGER,
updatedBy: DataTypes.INTEGER
}, {
sequelize,
modelName: 'notification',
});
return notification;
};


@@ -1,46 +0,0 @@
'use strict';
const {
Model
} = require('sequelize');
const orderdetails = require('./orderdetails');
module.exports = (sequelize, DataTypes) => {
class order extends Model {
/**
* Helper method for defining associations.
* This method is not a part of Sequelize lifecycle.
* The `models/index` file will call this method automatically.
*/
static associate(models) {
// define association here
models.orderDetails.belongsTo(models.order);
models.order.hasOne(models.store);
models.order.hasOne(models.prescription);
models.order.hasOne(models.users);
models.order.hasOne(models.orderProcess);
//models.order.hasOne(models.medicines);
//models.order.hasOne(models.orderDetails);
}
}
order.init({
date: DataTypes.DATE,
userId: DataTypes.INTEGER,
storeId: DataTypes.INTEGER,
prescriptionId: DataTypes.INTEGER,
deviceId: DataTypes.STRING,
quantity: DataTypes.INTEGER,
amount: DataTypes.DOUBLE,
discount: DataTypes.DOUBLE,
deliveryfee: DataTypes.DOUBLE,
total: DataTypes.DOUBLE,
paymentMethod: DataTypes.INTEGER,
orderProcessId: DataTypes.INTEGER,
status: DataTypes.INTEGER,
createdBy: DataTypes.INTEGER,
updatedBy: DataTypes.INTEGER
}, {
sequelize,
modelName: 'order',
});
return order;
};


@@ -1,38 +0,0 @@
'use strict';
const {
Model
} = require('sequelize');
const order = require('./order');
module.exports = (sequelize, DataTypes) => {
class orderDetails extends Model {
/**
* Helper method for defining associations.
* This method is not a part of Sequelize lifecycle.
* The `models/index` file will call this method automatically.
*/
static associate(models) {
// define association here
models.order.hasMany(models.orderDetails);
models.orderDetails.belongsTo(models.medicines);
//models.orderDetails.belongsTo(models.users);
//models.orderDetails.belongsTo(models.order);
}
}
orderDetails.init({
orderId: DataTypes.INTEGER,
medicineId: DataTypes.INTEGER,
quantity: DataTypes.INTEGER,
amount: DataTypes.DOUBLE,
curdiscount: DataTypes.DOUBLE,
discount: DataTypes.DOUBLE,
total: DataTypes.DOUBLE,
status: DataTypes.INTEGER,
createdBy: DataTypes.INTEGER,
updatedBy: DataTypes.INTEGER
}, {
sequelize,
modelName: 'orderDetails',
});
return orderDetails;
};


@@ -1,27 +0,0 @@
'use strict';
const {
Model
} = require('sequelize');
module.exports = (sequelize, DataTypes) => {
class orderNotification extends Model {
/**
* Helper method for defining associations.
* This method is not a part of Sequelize lifecycle.
* The `models/index` file will call this method automatically.
*/
static associate(models) {
// define association here
}
}
orderNotification.init({
storeId: DataTypes.INTEGER,
messageDetail: DataTypes.TEXT,
status: DataTypes.INTEGER,
createdBy: DataTypes.INTEGER,
updatedBy: DataTypes.INTEGER
}, {
sequelize,
modelName: 'orderNotification',
});
return orderNotification;
};


@@ -1,27 +0,0 @@
'use strict';
const {
Model
} = require('sequelize');
module.exports = (sequelize, DataTypes) => {
class orderProcess extends Model {
/**
* Helper method for defining associations.
* This method is not a part of Sequelize lifecycle.
* The `models/index` file will call this method automatically.
*/
static associate(models) {
// define association here
models.order.belongsTo(models.orderProcess);
}
}
orderProcess.init({
name: DataTypes.STRING,
status: DataTypes.INTEGER,
createdBy: DataTypes.INTEGER,
updatedBy: DataTypes.INTEGER
}, {
sequelize,
modelName: 'orderProcess',
});
return orderProcess;
};


@@ -1,28 +0,0 @@
'use strict';
const {
Model
} = require('sequelize');
module.exports = (sequelize, DataTypes) => {
class orderProcessHistory extends Model {
/**
* Helper method for defining associations.
* This method is not a part of Sequelize lifecycle.
* The `models/index` file will call this method automatically.
*/
static associate(models) {
// define association here
}
}
orderProcessHistory.init({
orderId: DataTypes.INTEGER,
statusDate: DataTypes.DATE,
processStatus: DataTypes.STRING,
status: DataTypes.INTEGER,
createdBy: DataTypes.INTEGER,
updatedBy: DataTypes.INTEGER
}, {
sequelize,
modelName: 'orderProcessHistory',
});
return orderProcessHistory;
};


@@ -1,32 +0,0 @@
'use strict';
const {
Model
} = require('sequelize');
module.exports = (sequelize, DataTypes) => {
class plan extends Model {
/**
* Helper method for defining associations.
* This method is not a part of Sequelize lifecycle.
* The `models/index` file will call this method automatically.
*/
static associate(models) {
// define association here
models.subscription.hasOne(models.plan);
models.store.belongsTo(models.plan);
}
}
plan.init({
name: DataTypes.STRING,
type: DataTypes.STRING,
planFor: DataTypes.INTEGER,
amount: DataTypes.DOUBLE,
validity: DataTypes.INTEGER,
status: DataTypes.INTEGER,
createdBy: DataTypes.INTEGER,
updatedBy: DataTypes.INTEGER
}, {
sequelize,
modelName: 'plan',
});
return plan;
};


@@ -1,30 +0,0 @@
'use strict';
const {
Model
} = require('sequelize');
module.exports = (sequelize, DataTypes) => {
class prescription extends Model {
/**
* Helper method for defining associations.
* This method is not a part of Sequelize lifecycle.
* The `models/index` file will call this method automatically.
*/
static associate(models) {
models.prescription.hasOne(models.users);
models.order.belongsTo(models.prescription);
}
}
prescription.init({
storeId: DataTypes.INTEGER,
prescription: DataTypes.STRING,
status: DataTypes.INTEGER,
date:DataTypes.DATE,
userId:DataTypes.STRING,
createdBy: DataTypes.INTEGER,
updatedBy: DataTypes.INTEGER
}, {
sequelize,
modelName: 'prescription',
});
return prescription;
};


@@ -1,26 +0,0 @@
'use strict';
const {
Model
} = require('sequelize');
module.exports = (sequelize, DataTypes) => {
class role extends Model {
/**
* Helper method for defining associations.
* This method is not a part of Sequelize lifecycle.
* The `models/index` file will call this method automatically.
*/
static associate(models) {
// define association here
}
}
role.init({
name: DataTypes.STRING,
status: DataTypes.INTEGER,
createdBy: DataTypes.INTEGER,
updatedBy: DataTypes.INTEGER
}, {
sequelize,
modelName: 'role',
});
return role;
};


@@ -1,27 +0,0 @@
'use strict';
const {
Model
} = require('sequelize');
module.exports = (sequelize, DataTypes) => {
class settings extends Model {
/**
* Helper method for defining associations.
* This method is not a part of Sequelize lifecycle.
* The `models/index` file will call this method automatically.
*/
static associate(models) {
// define association here
}
}
settings.init({
razor_key: DataTypes.TEXT,
razor_secretkey: DataTypes.TEXT,
hook_url: DataTypes.TEXT,
hook_secretkey: DataTypes.TEXT,
status: DataTypes.INTEGER
}, {
sequelize,
modelName: 'settings',
});
return settings;
};


@@ -1,70 +0,0 @@
'use strict';
const {
Model
} = require('sequelize');
module.exports = (sequelize, DataTypes) => {
class store extends Model {
/**
* Helper method for defining associations.
* This method is not a part of Sequelize lifecycle.
* The `models/index` file will call this method automatically.
*/
static associate(models) {
// define association here
models.order.belongsTo(models.store);
models.medicines.belongsTo(models.store);
models.prescription.belongsTo(models.store);
//models.prescription.belongsTo(models.store);
models.store.hasOne(models.subscription);
models.plan.hasOne(models.store);
}
}
store.init({
storeName: DataTypes.STRING,
storeUrl: DataTypes.STRING,
ownerName: DataTypes.STRING,
contactNumber: DataTypes.STRING,
deviceId: DataTypes.TEXT,
whatsAppNumber: DataTypes.STRING,
address1: DataTypes.STRING,
address2: DataTypes.STRING,
pincode: DataTypes.INTEGER,
location: DataTypes.STRING,
storeImage: DataTypes.STRING,
qrCode: DataTypes.TEXT,
storeDoc: DataTypes.STRING,
openingTime: DataTypes.TIME,
closingTime: DataTypes.TIME,
features: DataTypes.STRING,
disclaimer: DataTypes.STRING,
storeType: DataTypes.INTEGER,
otp:DataTypes.INTEGER,
verifyOtp:DataTypes.INTEGER,
storeStatus:DataTypes.INTEGER,
returnPolicy:DataTypes.STRING,
pickup:DataTypes.INTEGER,
enablePrescription:DataTypes.INTEGER,
wideDiscount:DataTypes.STRING,
wideDiscountType:DataTypes.INTEGER,
cashondelivery:DataTypes.INTEGER,
upi:DataTypes.STRING,
planId:DataTypes.INTEGER,
planType:DataTypes.INTEGER,
planValidity:DataTypes.INTEGER,
planStatus:DataTypes.INTEGER,
trialStatus:DataTypes.INTEGER,
startDate:DataTypes.DATE,
endDate:DataTypes.DATE,
status: DataTypes.INTEGER,
createdBy: DataTypes.INTEGER,
updatedBy: DataTypes.INTEGER,
upiQr: DataTypes.STRING
},
{
sequelize,
modelName: 'store',
});
return store;
};


@@ -1,27 +0,0 @@
'use strict';
const {
Model
} = require('sequelize');
module.exports = (sequelize, DataTypes) => {
class storeTime extends Model {
/**
* Helper method for defining associations.
* This method is not a part of Sequelize lifecycle.
* The `models/index` file will call this method automatically.
*/
static associate(models) {
// define association here
}
}
storeTime.init({
storeId: DataTypes.INTEGER,
dayName: DataTypes.STRING,
startTime: DataTypes.TIME,
endTime: DataTypes.TIME,
status: DataTypes.INTEGER
}, {
sequelize,
modelName: 'storeTime',
});
return storeTime;
};


@@ -1,27 +0,0 @@
'use strict';
const {
Model
} = require('sequelize');
module.exports = (sequelize, DataTypes) => {
class storevisitors extends Model {
/**
* Helper method for defining associations.
* This method is not a part of Sequelize lifecycle.
* The `models/index` file will call this method automatically.
*/
static associate(models) {
// define association here
}
}
storevisitors.init({
storeid: DataTypes.INTEGER,
count: DataTypes.INTEGER,
status: DataTypes.INTEGER,
createdBy: DataTypes.INTEGER,
updatedBy: DataTypes.INTEGER
}, {
sequelize,
modelName: 'storevisitors',
});
return storevisitors;
};


@@ -1,45 +0,0 @@
'use strict';
const {
Model
} = require('sequelize');
module.exports = (sequelize, DataTypes) => {
class subscription extends Model {
/**
* Helper method for defining associations.
* This method is not a part of Sequelize lifecycle.
* The `models/index` file will call this method automatically.
*/
static associate(models) {
// define association here
models.subscription.belongsTo(models.store);
models.subscription.hasOne(models.users);
models.subscription.belongsTo(models.plan);
}
}
subscription.init({
userId: DataTypes.INTEGER,
storeId: DataTypes.INTEGER,
billNumber: DataTypes.STRING,
planId: DataTypes.INTEGER,
amount: DataTypes.STRING,
validity: DataTypes.STRING,
paymentType: DataTypes.INTEGER,
startDate: DataTypes.DATE,
endDate: DataTypes.DATE,
webhookResponse: DataTypes.TEXT,
razorpayOrderId: DataTypes.TEXT,
razorpayPaymentId: DataTypes.TEXT,
razorpaySignature: DataTypes.TEXT,
paymentStatus: DataTypes.TEXT,
activeStatus: DataTypes.STRING,
status: DataTypes.INTEGER,
createdBy: DataTypes.INTEGER,
updatedBy: DataTypes.INTEGER
}, {
sequelize,
modelName: 'subscription',
});
return subscription;
};


@@ -1,43 +0,0 @@
'use strict';
const {
Model, INTEGER
} = require('sequelize');
module.exports = (sequelize, DataTypes) => {
class users extends Model {
/**
* Helper method for defining associations.
* This method is not a part of Sequelize lifecycle.
* The `models/index` file will call this method automatically.
*/
static associate(models) {
// define association here
models.order.belongsTo(models.users);
models.prescription.belongsTo(models.users);
models.subscription.belongsTo(models.users);
//models.medicines.hasOne(models.orderDetails);
}
}
users.init({
userType: DataTypes.INTEGER,
name: DataTypes.STRING,
image: DataTypes.TEXT,
referralDoctor: DataTypes.STRING,
email: DataTypes.STRING,
contactNumber: DataTypes.STRING,
whatsAppNumber: DataTypes.STRING,
address1: DataTypes.STRING,
address2: DataTypes.STRING,
pincode: DataTypes.INTEGER,
location: DataTypes.STRING,
deviceId: DataTypes.STRING,
username: DataTypes.STRING,
password: DataTypes.STRING,
status: DataTypes.INTEGER
}, {
sequelize,
modelName: 'users',
});
return users;
};


@@ -1,4 +0,0 @@
#!/usr/bin/env node
"use strict"
require("../dist/bin.js")


@@ -1,16 +0,0 @@
#!/usr/bin/env node
var blessed = require('../')
, argv = process.argv.slice(2)
, cmd = argv.shift()
, tput;
tput = blessed.tput({
terminal: process.env.TERM,
termcap: !!process.env.USE_TERMCAP,
extended: true
});
if (tput[cmd]) {
process.stdout.write(tput[cmd].apply(tput, argv));
}


@@ -1,316 +0,0 @@
#!/usr/bin/env node
"use strict";
/**
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.main = exports.generateRootName = void 0;
const fs = require("fs");
const path = require("path");
const util = require("util");
const pbjs = require("protobufjs/cli/pbjs");
const pbts = require("protobufjs/cli/pbts");
const readdir = util.promisify(fs.readdir);
const readFile = util.promisify(fs.readFile);
const writeFile = util.promisify(fs.writeFile);
const stat = util.promisify(fs.stat);
const pbjsMain = util.promisify(pbjs.main);
const pbtsMain = util.promisify(pbts.main);
const PROTO_LIST_REGEX = /_proto_list\.json$/;
const apacheLicense = `// Copyright ${new Date().getFullYear()} Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
`;
/**
* Recursively scans directories starting from `directory` and finds all files
* matching `PROTO_LIST_REGEX`.
*
* @param {string} directory Path to start the scan from.
* @return {Promise<string[]>} Resolves to an array of strings, each element is a full path to a matching file.
*/
async function findProtoJsonFiles(directory) {
const result = [];
const files = await readdir(directory);
for (const file of files) {
const fullPath = path.join(directory, file);
const fileStat = await stat(fullPath);
if (fileStat.isFile() && file.match(PROTO_LIST_REGEX)) {
result.push(fullPath);
}
else if (fileStat.isDirectory()) {
const nested = await findProtoJsonFiles(fullPath);
result.push(...nested);
}
}
return result;
}
/**
* Normalizes the Linux path for the current operating system.
*
* @param {string} filePath Linux-style path (with forward slashes)
* @return {string} Normalized path.
*/
function normalizePath(filePath) {
return path.join(...filePath.split('/'));
}
function getAllEnums(dts) {
const result = new Set();
const lines = dts.split('\n');
const nestedIds = [];
let currentEnum = undefined;
for (const line of lines) {
const match = line.match(/^\s*(?:export )?(namespace|class|interface|enum) (\w+) .*{/);
if (match) {
const [, keyword, id] = match;
nestedIds.push(id);
if (keyword === 'enum') {
currentEnum = nestedIds.join('.');
result.add(currentEnum);
}
continue;
}
if (line.match(/^\s*}/)) {
nestedIds.pop();
currentEnum = undefined;
continue;
}
}
return result;
}
function updateDtsTypes(dts, enums) {
const lines = dts.split('\n');
const result = [];
for (const line of lines) {
let typeName = undefined;
// Enums can be used in interfaces and in classes.
// For simplicity, we'll check these two cases independently.
// encoding?: (google.cloud.speech.v1p1beta1.RecognitionConfig.AudioEncoding|null);
const interfaceMatch = line.match(/"?\w+"?\?: \(([\w.]+)\|null\);/);
if (interfaceMatch) {
typeName = interfaceMatch[1];
}
// public encoding: google.cloud.speech.v1p1beta1.RecognitionConfig.AudioEncoding;
const classMatch = line.match(/public \w+: ([\w.]+);/);
if (classMatch) {
typeName = classMatch[1];
}
if (line.match(/\(number\|Long(?:\|null)?\)/)) {
typeName = 'Long';
}
let replaced = line;
if (typeName && enums.has(typeName)) {
// enum: E => E|keyof typeof E to allow all string values
replaced = replaced.replace(typeName, `${typeName}|keyof typeof ${typeName}`);
}
else if (typeName === 'Uint8Array') {
// bytes: Uint8Array => Uint8Array|string to allow base64-encoded strings
replaced = replaced.replace(typeName, `${typeName}|string`);
}
else if (typeName === 'Long') {
// Longs can be passed as strings :(
// number|Long => number|Long|string
replaced = replaced.replace('number|Long', 'number|Long|string');
}
// add brackets if we have added a |
replaced = replaced.replace(/: ([\w.]+\|[ \w.|]+);/, ': ($1);');
result.push(replaced);
}
return result.join('\n');
}
function fixJsFile(js) {
// 1. fix protobufjs require: we don't want the libraries to
// depend on protobufjs, so we re-export it from google-gax
js = js.replace('require("protobufjs/minimal")', 'require("google-gax").protobufMinimal');
// 2. add Apache license to the generated .js file
js = apacheLicense + js;
// 3. reformat JSDoc reference link in the comments
js = js.replace(/{@link (.*?)#(.*?)}/g, '{@link $1|$2}');
return js;
}
function fixDtsFile(dts) {
// 1. fix for pbts output: the corresponding protobufjs PR
// https://github.com/protobufjs/protobuf.js/pull/1166
// is merged but not yet released.
if (!dts.match(/import \* as Long/)) {
dts = 'import * as Long from "long";\n' + dts;
}
// 2. fix protobufjs import: we don't want the libraries to
// depend on protobufjs, so we re-export it from google-gax
dts = dts.replace('import * as $protobuf from "protobufjs"', 'import {protobuf as $protobuf} from "google-gax"');
// 3. add Apache license to the generated .d.ts file
dts = apacheLicense + dts;
// 4. major hack: update types to allow passing strings
// where enums, longs, or bytes are expected
const enums = getAllEnums(dts);
dts = updateDtsTypes(dts, enums);
return dts;
}
/**
* Returns a combined list of proto files listed in all JSON files given.
*
* @param {string[]} protoJsonFiles List of JSON files to parse
* @return {Promise<string[]>} Resolves to an array of proto files.
*/
async function buildListOfProtos(protoJsonFiles) {
const result = [];
for (const file of protoJsonFiles) {
const directory = path.dirname(file);
const content = await readFile(file);
const list = JSON.parse(content.toString()).map((filePath) => path.join(directory, normalizePath(filePath)));
result.push(...list);
}
return result;
}
/**
* Runs `pbjs` to compile the given proto files, placing the result into
* `./protos/protos.json`. No support for changing output filename for now
* (but it's a TODO!)
*
* @param {string} rootName Name of the root object for pbjs static module (-r option)
* @param {string[]} protos List of proto files to compile.
*/
async function compileProtos(rootName, protos, skipJson = false) {
if (!skipJson) {
// generate protos.json file from proto list
const jsonOutput = path.join('protos', 'protos.json');
if (protos.length === 0) {
// no input file, just emit an empty object
await writeFile(jsonOutput, '{}');
return;
}
const pbjsArgs4JSON = [
'--target',
'json',
'-p',
'protos',
'-p',
path.join(__dirname, '..', '..', 'protos'),
'-o',
jsonOutput,
];
pbjsArgs4JSON.push(...protos);
await pbjsMain(pbjsArgs4JSON);
}
// generate protos/protos.js from protos.json
const jsOutput = path.join('protos', 'protos.js');
const pbjsArgs4js = [
'-r',
rootName,
'--target',
'static-module',
'-p',
'protos',
'-p',
path.join(__dirname, '..', '..', 'protos'),
'-o',
jsOutput,
];
pbjsArgs4js.push(...protos);
await pbjsMain(pbjsArgs4js);
let jsResult = (await readFile(jsOutput)).toString();
jsResult = fixJsFile(jsResult);
await writeFile(jsOutput, jsResult);
// generate protos/protos.d.ts
const tsOutput = path.join('protos', 'protos.d.ts');
const pbjsArgs4ts = [jsOutput, '-o', tsOutput];
await pbtsMain(pbjsArgs4ts);
let tsResult = (await readFile(tsOutput)).toString();
tsResult = fixDtsFile(tsResult);
await writeFile(tsOutput, tsResult);
}
/**
*
* @param directories List of directories to process. Normally, just the
* `./src` folder of the given client library.
* @return {Promise<string>} Resolves to a unique name for protobuf root to use in the JS static module, or 'default'.
*/
async function generateRootName(directories) {
// We need to provide `-r root` option to `pbjs -t static-module`, otherwise
// we'll have big problems if two different libraries are used together.
// It's OK to play some guessing game here: if we locate `package.json`
// with a package name, we'll use it; otherwise, we'll fallback to 'default'.
for (const directory of directories) {
const packageJson = path.resolve(directory, '..', 'package.json');
if (fs.existsSync(packageJson)) {
const json = JSON.parse((await readFile(packageJson)).toString());
const name = json.name.replace(/[^\w\d]/g, '_');
const hopefullyUniqueName = `${name}_protos`;
return hopefullyUniqueName;
}
}
return 'default';
}
exports.generateRootName = generateRootName;
/**
* Main function. Takes an array of directories to process.
* Looks for JSON files matching `PROTO_LIST_REGEX`, parses them to get a list of all
* proto files used by the client library, and calls `pbjs` to compile them all into
* JSON (`pbjs -t json`).
*
* Exported to be called from a test.
*
* @param {string[]} directories List of directories to process. Normally, just the
* `./src` folder of the given client library.
*/
async function main(parameters) {
const protoJsonFiles = [];
let skipJson = false;
const directories = [];
for (const parameter of parameters) {
if (parameter === '--skip-json') {
skipJson = true;
continue;
}
// it's not an option so it's a directory
const directory = parameter;
directories.push(directory);
protoJsonFiles.push(...(await findProtoJsonFiles(directory)));
}
const rootName = await generateRootName(directories);
const protos = await buildListOfProtos(protoJsonFiles);
await compileProtos(rootName, protos, skipJson);
}
exports.main = main;
/**
* Shows the usage information.
*/
function usage() {
console.log(`Usage: node ${process.argv[1]} [--skip-json] directory ...`);
console.log(`Finds all files matching ${PROTO_LIST_REGEX} in the given directories.`);
console.log('Each of those files should contain a JSON array of proto files used by the');
console.log('client library. Those proto files will be compiled to JSON using pbjs tool');
console.log('from protobufjs.');
}
if (require.main === module) {
if (process.argv.length <= 2) {
usage();
// eslint-disable-next-line no-process-exit
process.exit(1);
}
// argv[0] is node.js binary, argv[1] is script path
main(process.argv.slice(2));
}
//# sourceMappingURL=compileProtos.js.map


@@ -1,4 +0,0 @@
#!/usr/bin/env node
var cli = require('../lib/cli'); cli.interpret();


@@ -1,3 +0,0 @@
#!/usr/bin/env node
var cli = require('../src/cli')
cli.default(process.argv)


@@ -1,77 +0,0 @@
#!/usr/bin/env node
/*
Copyright (C) 2012 Yusuke Suzuki <utatane.tea@gmail.com>
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*jslint sloppy:true node:true */
var fs = require('fs'),
path = require('path'),
root = path.join(path.dirname(fs.realpathSync(__filename)), '..'),
esprima = require('esprima'),
escodegen = require(root),
optionator = require('optionator')({
prepend: 'Usage: escodegen [options] file...',
options: [
{
option: 'config',
alias: 'c',
type: 'String',
description: 'configuration json for escodegen'
}
]
}),
args = optionator.parse(process.argv),
files = args._,
options,
esprimaOptions = {
raw: true,
tokens: true,
range: true,
comment: true
};
if (files.length === 0) {
console.log(optionator.generateHelp());
process.exit(1);
}
if (args.config) {
try {
options = JSON.parse(fs.readFileSync(args.config, 'utf-8'));
} catch (err) {
console.error('Error parsing config: ', err);
}
}
files.forEach(function (filename) {
var content = fs.readFileSync(filename, 'utf-8'),
syntax = esprima.parse(content, esprimaOptions);
if (options.comment) {
escodegen.attachComments(syntax, syntax.comments, syntax.tokens);
}
console.log(escodegen.generate(syntax, options));
});
/* vim: set sw=4 ts=4 et tw=80 : */


@@ -1,64 +0,0 @@
#!/usr/bin/env node
/*
Copyright (C) 2012 Yusuke Suzuki <utatane.tea@gmail.com>
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*jslint sloppy:true node:true */
var fs = require('fs'),
path = require('path'),
root = path.join(path.dirname(fs.realpathSync(__filename)), '..'),
escodegen = require(root),
optionator = require('optionator')({
prepend: 'Usage: esgenerate [options] file.json ...',
options: [
{
option: 'config',
alias: 'c',
type: 'String',
description: 'configuration json for escodegen'
}
]
}),
args = optionator.parse(process.argv),
files = args._,
options;
if (files.length === 0) {
console.log(optionator.generateHelp());
process.exit(1);
}
if (args.config) {
try {
options = JSON.parse(fs.readFileSync(args.config, 'utf-8'))
} catch (err) {
console.error('Error parsing config: ', err);
}
}
files.forEach(function (filename) {
var content = fs.readFileSync(filename, 'utf-8');
console.log(escodegen.generate(JSON.parse(content), options));
});
/* vim: set sw=4 ts=4 et tw=80 : */

View File

@ -1,139 +0,0 @@
#!/usr/bin/env node
/*
Copyright JS Foundation and other contributors, https://js.foundation/
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*jslint sloppy:true node:true rhino:true */
var fs, esprima, fname, forceFile, content, options, syntax;
if (typeof require === 'function') {
fs = require('fs');
try {
esprima = require('esprima');
} catch (e) {
esprima = require('../');
}
} else if (typeof load === 'function') {
try {
load('esprima.js');
} catch (e) {
load('../esprima.js');
}
}
// Shims to Node.js objects when running under Rhino.
if (typeof console === 'undefined' && typeof process === 'undefined') {
console = { log: print };
fs = { readFileSync: readFile };
process = { argv: arguments, exit: quit };
process.argv.unshift('esparse.js');
process.argv.unshift('rhino');
}
function showUsage() {
console.log('Usage:');
console.log(' esparse [options] [file.js]');
console.log();
console.log('Available options:');
console.log();
console.log(' --comment Gather all line and block comments in an array');
console.log(' --loc Include line-column location info for each syntax node');
console.log(' --range Include index-based range for each syntax node');
console.log(' --raw Display the raw value of literals');
console.log(' --tokens List all tokens in an array');
console.log(' --tolerant Tolerate errors on a best-effort basis (experimental)');
console.log(' -v, --version Shows program version');
console.log();
process.exit(1);
}
options = {};
process.argv.splice(2).forEach(function (entry) {
if (forceFile || entry === '-' || entry.slice(0, 1) !== '-') {
if (typeof fname === 'string') {
console.log('Error: more than one input file.');
process.exit(1);
} else {
fname = entry;
}
} else if (entry === '-h' || entry === '--help') {
showUsage();
} else if (entry === '-v' || entry === '--version') {
console.log('ECMAScript Parser (using Esprima version', esprima.version, ')');
console.log();
process.exit(0);
} else if (entry === '--comment') {
options.comment = true;
} else if (entry === '--loc') {
options.loc = true;
} else if (entry === '--range') {
options.range = true;
} else if (entry === '--raw') {
options.raw = true;
} else if (entry === '--tokens') {
options.tokens = true;
} else if (entry === '--tolerant') {
options.tolerant = true;
} else if (entry === '--') {
forceFile = true;
} else {
console.log('Error: unknown option ' + entry + '.');
process.exit(1);
}
});
// Special handling for regular expression literal since we need to
// convert it to a string literal, otherwise it will be decoded
// as object "{}" and the regular expression would be lost.
function adjustRegexLiteral(key, value) {
if (key === 'value' && value instanceof RegExp) {
value = value.toString();
}
return value;
}
function run(content) {
syntax = esprima.parse(content, options);
console.log(JSON.stringify(syntax, adjustRegexLiteral, 4));
}
try {
if (fname && (fname !== '-' || forceFile)) {
run(fs.readFileSync(fname, 'utf-8'));
} else {
var content = '';
process.stdin.resume();
process.stdin.on('data', function(chunk) {
content += chunk;
});
process.stdin.on('end', function() {
run(content);
});
}
} catch (e) {
console.log('Error: ' + e.message);
process.exit(1);
}

View File

@ -1,236 +0,0 @@
#!/usr/bin/env node
/*
Copyright JS Foundation and other contributors, https://js.foundation/
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*jslint sloppy:true plusplus:true node:true rhino:true */
/*global phantom:true */
var fs, system, esprima, options, fnames, forceFile, count;
if (typeof esprima === 'undefined') {
// PhantomJS can only require() relative files
if (typeof phantom === 'object') {
fs = require('fs');
system = require('system');
esprima = require('./esprima');
} else if (typeof require === 'function') {
fs = require('fs');
try {
esprima = require('esprima');
} catch (e) {
esprima = require('../');
}
} else if (typeof load === 'function') {
try {
load('esprima.js');
} catch (e) {
load('../esprima.js');
}
}
}
// Shims to Node.js objects when running under PhantomJS 1.7+.
if (typeof phantom === 'object') {
fs.readFileSync = fs.read;
process = {
argv: [].slice.call(system.args),
exit: phantom.exit,
on: function (evt, callback) {
callback();
}
};
process.argv.unshift('phantomjs');
}
// Shims to Node.js objects when running under Rhino.
if (typeof console === 'undefined' && typeof process === 'undefined') {
console = { log: print };
fs = { readFileSync: readFile };
process = {
argv: arguments,
exit: quit,
on: function (evt, callback) {
callback();
}
};
process.argv.unshift('esvalidate.js');
process.argv.unshift('rhino');
}
function showUsage() {
console.log('Usage:');
console.log(' esvalidate [options] [file.js...]');
console.log();
console.log('Available options:');
console.log();
console.log(' --format=type Set the report format, plain (default) or junit');
console.log(' -v, --version Print program version');
console.log();
process.exit(1);
}
options = {
format: 'plain'
};
fnames = [];
process.argv.splice(2).forEach(function (entry) {
if (forceFile || entry === '-' || entry.slice(0, 1) !== '-') {
fnames.push(entry);
} else if (entry === '-h' || entry === '--help') {
showUsage();
} else if (entry === '-v' || entry === '--version') {
console.log('ECMAScript Validator (using Esprima version', esprima.version, ')');
console.log();
process.exit(0);
} else if (entry.slice(0, 9) === '--format=') {
options.format = entry.slice(9);
if (options.format !== 'plain' && options.format !== 'junit') {
console.log('Error: unknown report format ' + options.format + '.');
process.exit(1);
}
} else if (entry === '--') {
forceFile = true;
} else {
console.log('Error: unknown option ' + entry + '.');
process.exit(1);
}
});
if (fnames.length === 0) {
fnames.push('');
}
if (options.format === 'junit') {
console.log('<?xml version="1.0" encoding="UTF-8"?>');
console.log('<testsuites>');
}
count = 0;
function run(fname, content) {
var timestamp, syntax, name;
try {
if (typeof content !== 'string') {
throw content;
}
if (content[0] === '#' && content[1] === '!') {
content = '//' + content.substr(2, content.length);
}
timestamp = Date.now();
syntax = esprima.parse(content, { tolerant: true });
if (options.format === 'junit') {
name = fname;
if (name.lastIndexOf('/') >= 0) {
name = name.slice(name.lastIndexOf('/') + 1);
}
console.log('<testsuite name="' + fname + '" errors="0" ' +
' failures="' + syntax.errors.length + '" ' +
' tests="' + syntax.errors.length + '" ' +
' time="' + Math.round((Date.now() - timestamp) / 1000) +
'">');
syntax.errors.forEach(function (error) {
var msg = error.message;
msg = msg.replace(/^Line\ [0-9]*\:\ /, '');
console.log(' <testcase name="Line ' + error.lineNumber + ': ' + msg + '" ' +
' time="0">');
console.log(' <error type="SyntaxError" message="' + error.message + '">' +
error.message + '(' + name + ':' + error.lineNumber + ')' +
'</error>');
console.log(' </testcase>');
});
console.log('</testsuite>');
} else if (options.format === 'plain') {
syntax.errors.forEach(function (error) {
var msg = error.message;
msg = msg.replace(/^Line\ [0-9]*\:\ /, '');
msg = fname + ':' + error.lineNumber + ': ' + msg;
console.log(msg);
++count;
});
}
} catch (e) {
++count;
if (options.format === 'junit') {
console.log('<testsuite name="' + fname + '" errors="1" failures="0" tests="1" ' +
' time="' + Math.round((Date.now() - timestamp) / 1000) + '">');
console.log(' <testcase name="' + e.message + '" ' + ' time="0">');
console.log(' <error type="ParseError" message="' + e.message + '">' +
e.message + '(' + fname + ((e.lineNumber) ? ':' + e.lineNumber : '') +
')</error>');
console.log(' </testcase>');
console.log('</testsuite>');
} else {
console.log(fname + ':' + e.lineNumber + ': ' + e.message.replace(/^Line\ [0-9]*\:\ /, ''));
}
}
}
fnames.forEach(function (fname) {
var content = '';
try {
if (fname && (fname !== '-' || forceFile)) {
content = fs.readFileSync(fname, 'utf-8');
} else {
fname = '';
process.stdin.resume();
process.stdin.on('data', function(chunk) {
content += chunk;
});
process.stdin.on('end', function() {
run(fname, content);
});
return;
}
} catch (e) {
content = e;
}
run(fname, content);
});
process.on('exit', function () {
if (options.format === 'junit') {
console.log('</testsuites>');
}
if (count > 0) {
process.exit(1);
}
if (count === 0 && typeof phantom === 'object') {
process.exit(0);
}
});

View File

@ -1,71 +0,0 @@
#!/usr/bin/env node
"use strict";
// The MIT License (MIT)
//
// Copyright (c) 2022 Firebase
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
Object.defineProperty(exports, "__esModule", { value: true });
const express = require("express");
const loader_1 = require("../runtime/loader");
function printUsageAndExit() {
console.error(`
Usage: firebase-functions [functionsDir]
Arguments:
- functionsDir: Directory containing source code for Firebase Functions.
`);
process.exit(1);
}
let functionsDir = '.';
const args = process.argv.slice(2);
if (args.length > 1) {
if (args[0] === '-h' || args[0] === '--help') {
printUsageAndExit();
}
functionsDir = args[0];
}
let server;
const app = express();
async function handleQuitquitquit(req, res) {
res.send('ok');
server.close(() => console.log('shutdown requested via /__/quitquitquit'));
}
app.get('/__/quitquitquit', handleQuitquitquit);
app.post('/__/quitquitquit', handleQuitquitquit);
if (process.env.FUNCTIONS_CONTROL_API === 'true') {
app.get('/__/functions.yaml', async (req, res) => {
try {
const stack = await (0, loader_1.loadStack)(functionsDir);
res.setHeader('content-type', 'text/yaml');
res.send(JSON.stringify(stack));
}
catch (e) {
res
.status(400)
.send(`Failed to generate manifest from function source: ${e}`);
}
});
}
let port = 8080;
if (process.env.PORT) {
port = Number.parseInt(process.env.PORT);
}
console.log('Serving at port', port);
server = app.listen(port);

View File

@ -1,26 +0,0 @@
#!/usr/bin/env node
"use strict";
/**
* Copyright 2018 Google LLC
*
* Distributed under MIT license.
* See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
Object.defineProperty(exports, "__esModule", { value: true });
const gp12 = require("../index");
const argv = process.argv;
const p12Path = argv[2];
if (!p12Path) {
console.error('Please specify a *.p12 file to convert.');
process.exitCode = 1;
}
gp12.getPem(p12Path, (err, pem) => {
if (err) {
console.log(err);
process.exitCode = 1;
}
else {
console.log(pem);
}
});
//# sourceMappingURL=gp12-pem.js.map

View File

@ -1,4 +0,0 @@
#!/usr/bin/env node
var cli = require('../lib/cli');
cli.interpret();

View File

@ -1,4 +0,0 @@
#!/usr/bin/env node
'use strict'
process.exit(require('./') ? 0 : 1)

View File

@ -1,17 +0,0 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\is-ci\bin.js" %*

View File

@ -1,28 +0,0 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../is-ci/bin.js" $args
} else {
& "$basedir/node$exe" "$basedir/../is-ci/bin.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../is-ci/bin.js" $args
} else {
& "node$exe" "$basedir/../is-ci/bin.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

View File

@ -1,4 +0,0 @@
#!/usr/bin/env node
var cli = require('../lib/cli');
cli.interpret();

View File

@ -1,186 +0,0 @@
#!/usr/bin/env node
/**
 * json2yaml cli program
*/
var YAML = require('../lib/Yaml.js');
var ArgumentParser = require('argparse').ArgumentParser;
var cli = new ArgumentParser({
prog: "json2yaml",
version: require('../package.json').version,
addHelp: true
});
cli.addArgument(
['-d', '--depth'],
{
action: 'store',
type: 'int',
help: 'Set minimum level of depth before generating inline YAML (default: 2).'
}
);
cli.addArgument(
['-i', '--indentation'],
{
action: 'store',
type: 'int',
help: 'Number of space characters used to indent code (default: 2).',
}
);
cli.addArgument(
['-s', '--save'],
{
help: 'Save output inside YML file(s) with the same name.',
action: 'storeTrue'
}
);
cli.addArgument(
['-r', '--recursive'],
{
help: 'If the input is a directory, also find JSON files in sub-directories recursively.',
action: 'storeTrue'
}
);
cli.addArgument(
['-w', '--watch'],
{
help: 'Watch for changes.',
action: 'storeTrue'
}
);
cli.addArgument(['input'], {
help: 'JSON file or directory containing JSON files or - to read JSON from stdin.'
});
try {
var options = cli.parseArgs();
var path = require('path');
var fs = require('fs');
var glob = require('glob');
var rootPath = process.cwd();
var parsePath = function(input) {
if (input == '-') return '-';
var output;
if (!(input != null)) {
return rootPath;
}
output = path.normalize(input);
if (output.length === 0) {
return rootPath;
}
if (output.charAt(0) !== '/') {
output = path.normalize(rootPath + '/./' + output);
}
if (output.length > 1 && output.charAt(output.length - 1) === '/') {
return output.substr(0, output.length - 1);
}
return output;
};
// Find files
var findFiles = function(input) {
if (input != '-' && input != null) {
var isDirectory = fs.statSync(input).isDirectory();
var files = [];
if (!isDirectory) {
files.push(input);
}
else {
if (options.recursive) {
files = files.concat(glob.sync(input+'/**/*.json'));
}
else {
files = files.concat(glob.sync(input+'/*.json'));
}
}
return files;
}
return null;
};
// Convert to YAML
var convertToYAML = function(input, inline, save, spaces, str) {
var yaml;
if (inline == null) inline = 2;
if (spaces == null) spaces = 2;
if (str == null) {
str = ''+fs.readFileSync(input);
}
yaml = YAML.dump(JSON.parse(str), inline, spaces);
if (!save || input == null) {
// Output result
process.stdout.write(yaml);
}
else {
var output;
if (input.substring(input.length-5) == '.json') {
output = input.substr(0, input.length-5) + '.yaml';
}
else {
output = input + '.yaml';
}
// Write file
var file = fs.openSync(output, 'w+');
fs.writeSync(file, yaml);
fs.closeSync(file);
process.stdout.write("saved "+output+"\n");
}
};
var input = parsePath(options.input);
var mtimes = [];
var runCommand = function() {
try {
var files = findFiles(input);
if (files != null) {
var len = files.length;
for (var i = 0; i < len; i++) {
var file = files[i];
var stat = fs.statSync(file);
var time = stat.mtime.getTime();
if (!stat.isDirectory()) {
if (!mtimes[file] || mtimes[file] < time) {
mtimes[file] = time;
convertToYAML(file, options.depth, options.save, options.indentation);
}
}
}
} else {
// Read from STDIN
var stdin = process.openStdin();
var data = "";
stdin.on('data', function(chunk) {
data += chunk;
});
stdin.on('end', function() {
convertToYAML(null, options.depth, options.save, options.indentation, data);
});
}
} catch (e) {
process.stderr.write((e.message ? e.message : e)+"\n");
}
};
if (!options.watch) {
runCommand();
} else {
runCommand();
setInterval(runCommand, 1000);
}
} catch (e) {
process.stderr.write((e.message ? e.message : e)+"\n");
}

View File

@ -1,8 +0,0 @@
#!/usr/bin/env node
var mime = require('./mime.js');
var file = process.argv[2];
var type = mime.lookup(file);
process.stdout.write(type + '\n');

View File

@ -1,17 +0,0 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\mime\cli.js" %*

View File

@ -1,28 +0,0 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../mime/cli.js" $args
} else {
& "$basedir/node$exe" "$basedir/../mime/cli.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../mime/cli.js" $args
} else {
& "node$exe" "$basedir/../mime/cli.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

View File

@ -1,33 +0,0 @@
#!/usr/bin/env node
var mkdirp = require('../');
var minimist = require('minimist');
var fs = require('fs');
var argv = minimist(process.argv.slice(2), {
alias: { m: 'mode', h: 'help' },
string: [ 'mode' ]
});
if (argv.help) {
fs.createReadStream(__dirname + '/usage.txt').pipe(process.stdout);
return;
}
var paths = argv._.slice();
var mode = argv.mode ? parseInt(argv.mode, 8) : undefined;
(function next () {
if (paths.length === 0) return;
var p = paths.shift();
if (mode === undefined) mkdirp(p, cb)
else mkdirp(p, mode, cb)
function cb (err) {
if (err) {
console.error(err.message);
process.exit(1);
}
else next();
}
})();

View File

@ -1,17 +0,0 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\mkdirp\bin\cmd.js" %*

View File

@ -1,28 +0,0 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../mkdirp/bin/cmd.js" $args
} else {
& "$basedir/node$exe" "$basedir/../mkdirp/bin/cmd.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../mkdirp/bin/cmd.js" $args
} else {
& "node$exe" "$basedir/../mkdirp/bin/cmd.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

View File

@ -1,40 +0,0 @@
#!/usr/bin/env node
var needle = require('./../lib/needle');
function exit(code, str) {
console.log(str) || process.exit(code);
}
function usage() {
var out = ['Usage: needle [get|head|post|put|delete] url [query]'];
out.push('Examples: \n needle get google.com\n needle post server.com/api foo=bar');
exit(1, out.join('\n'))
}
if (process.argv[2] == '-v' || process.argv[2] == '--version')
exit(0, needle.version);
else if (process.argv[2] == null)
usage();
var method = process.argv[2],
url = process.argv[3],
options = { compressed: true, parse_response: true, follow_max: 5, timeout: 10000 };
if (!needle[method]) {
url = method;
method = 'get';
}
var callback = function(err, resp) {
if (err) return exit(1, "Error: " + err.message);
if (process.argv.indexOf('-i') != -1)
console.log(resp.headers) || console.log('');
console.log(resp.body.toString());
};
if (method == 'post' || method == 'put')
needle[method](url, process.argv[4], options, callback);
else
needle[method](url, options, callback);

View File

@ -1,16 +0,0 @@
#!/usr/bin/env node
const cli = require('../lib/cli');
const nodemon = require('../lib/');
const options = cli.parse(process.argv);
nodemon(options);
const fs = require('fs');
// checks for available update and returns an instance
const pkg = JSON.parse(fs.readFileSync(__dirname + '/../package.json'));
if (pkg.version.indexOf('0.0.0') !== 0 && options.noUpdateNotifier !== true) {
require('update-notifier')({ pkg }).notify();
}

View File

@ -1,17 +0,0 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\nodemon\bin\nodemon.js" %*

View File

@ -1,28 +0,0 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../nodemon/bin/nodemon.js" $args
} else {
& "$basedir/node$exe" "$basedir/../nodemon/bin/nodemon.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../nodemon/bin/nodemon.js" $args
} else {
& "node$exe" "$basedir/../nodemon/bin/nodemon.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

View File

@ -1,112 +0,0 @@
#!/usr/bin/env node
const touch = require("../index.js")
const usage = code => {
console[code ? 'error' : 'log'](
'usage:\n' +
'touch [-acfm] [-r file] [-t [[CC]YY]MMDDhhmm[.SS]] file ...'
)
process.exit(code)
}
const singleFlags = {
a: 'atime',
m: 'mtime',
c: 'nocreate',
f: 'force'
}
const singleOpts = {
r: 'ref',
t: 'time'
}
const files = []
const args = process.argv.slice(2)
const options = {}
for (let i = 0; i < args.length; i++) {
const arg = args[i]
if (!arg.match(/^-/)) {
files.push(arg)
continue
}
// expand shorthands
if (arg.charAt(1) !== '-') {
const expand = []
for (let f = 1; f < arg.length; f++) {
const fc = arg.charAt(f)
const sf = singleFlags[fc]
const so = singleOpts[fc]
if (sf)
expand.push('--' + sf)
else if (so) {
const soslice = arg.slice(f + 1)
const soval = soslice.charAt(0) === '=' ? soslice : '=' + soslice
expand.push('--' + so + soval)
f = arg.length
} else if (arg !== '-' + fc)
expand.push('-' + fc)
}
if (expand.length) {
args.splice.apply(args, [i, 1].concat(expand))
i--
continue
}
}
const argsplit = arg.split('=')
const key = argsplit.shift().replace(/^\-\-/, '')
const val = argsplit.length ? argsplit.join('=') : null
switch (key) {
case 'time':
const timestr = val || args[++i]
// [-t [[CC]YY]MMDDhhmm[.SS]]
const parsedtime = timestr.match(
/^(([0-9]{2})?([0-9]{2}))?([0-9]{2})([0-9]{2})([0-9]{2})([0-9]{2})(\.([0-9]{2}))?$/
)
if (!parsedtime) {
console.error('touch: out of range or illegal ' +
'time specification: ' +
'[[CC]YY]MMDDhhmm[.SS]')
process.exit(1)
} else {
const y = +parsedtime[1]
const year = parsedtime[2] ? y
: y <= 68 ? 2000 + y
: 1900 + y
const MM = +parsedtime[4] - 1
const dd = +parsedtime[5]
const hh = +parsedtime[6]
const mm = +parsedtime[7]
const ss = +parsedtime[8]
options.time = new Date(Date.UTC(year, MM, dd, hh, mm, ss))
}
continue
case 'ref':
options.ref = val || args[++i]
continue
case 'mtime':
case 'nocreate':
case 'atime':
case 'force':
options[key] = true
continue
default:
console.error('touch: illegal option -- ' + arg)
usage(1)
}
}
if (!files.length)
usage()
process.exitCode = 0
Promise.all(files.map(f => touch(f, options)))
.catch(er => process.exitCode = 1)

View File

@ -1,17 +0,0 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\touch\bin\nodetouch.js" %*

View File

@ -1,28 +0,0 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../touch/bin/nodetouch.js" $args
} else {
& "$basedir/node$exe" "$basedir/../touch/bin/nodetouch.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../touch/bin/nodetouch.js" $args
} else {
& "node$exe" "$basedir/../touch/bin/nodetouch.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

View File

@ -1,44 +0,0 @@
#!/usr/bin/env node
var nopt = require("../lib/nopt")
, types = { num: Number
, bool: Boolean
, help: Boolean
, list: Array
, "num-list": [Number, Array]
, "str-list": [String, Array]
, "bool-list": [Boolean, Array]
, str: String }
, shorthands = { s: [ "--str", "astring" ]
, b: [ "--bool" ]
, nb: [ "--no-bool" ]
, tft: [ "--bool-list", "--no-bool-list", "--bool-list", "true" ]
, "?": ["--help"]
, h: ["--help"]
, H: ["--help"]
, n: [ "--num", "125" ] }
, parsed = nopt( types
, shorthands
, process.argv
, 2 )
console.log("parsed", parsed)
if (parsed.help) {
console.log("")
console.log("nopt cli tester")
console.log("")
console.log("types")
console.log(Object.keys(types).map(function M (t) {
var type = types[t]
if (Array.isArray(type)) {
return [t, type.map(function (type) { return type.name })]
}
return [t, type && type.name]
}).reduce(function (s, i) {
s[i[0]] = i[1]
return s
}, {}))
console.log("")
console.log("shorthands")
console.log(shorthands)
}

View File

@ -1,17 +0,0 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\nopt\bin\nopt.js" %*

View File

@ -1,28 +0,0 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../nopt/bin/nopt.js" $args
} else {
& "$basedir/node$exe" "$basedir/../nopt/bin/nopt.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../nopt/bin/nopt.js" $args
} else {
& "node$exe" "$basedir/../nopt/bin/nopt.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

View File

@ -1,6 +0,0 @@
#!/usr/bin/env node
var path = require("path"),
cli = require(path.join(__dirname, "..", "cli", "pbjs.js"));
var ret = cli.main(process.argv.slice(2));
if (typeof ret === 'number')
process.exit(ret);

View File

@ -1,6 +0,0 @@
#!/usr/bin/env node
var path = require("path"),
cli = require(path.join(__dirname, "..", "cli", "pbts.js"));
var ret = cli.main(process.argv.slice(2));
if (typeof ret === 'number')
process.exit(ret);

View File

@ -1,3 +0,0 @@
#!/usr/bin/env node
require('../lib/binaries/CLI.js');

View File

@ -1,3 +0,0 @@
#!/usr/bin/env node
require('../lib/binaries/DevCLI.js');

View File

@ -1,3 +0,0 @@
#!/usr/bin/env node
require('../lib/binaries/Runtime4Docker.js');

View File

@ -1,3 +0,0 @@
#!/usr/bin/env node
require('../lib/binaries/Runtime4Docker.js');

View File

@ -1,828 +0,0 @@
#!/usr/bin/env node
"use strict";
/**
* @license
* Copyright 2020 gRPC authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const path = require("path");
const Protobuf = require("protobufjs");
const yargs = require("yargs");
const camelCase = require("lodash.camelcase");
const util_1 = require("../src/util");
class TextFormatter {
constructor() {
this.indentText = ' ';
this.indentValue = 0;
this.textParts = [];
}
indent() {
this.indentValue += 1;
}
unindent() {
this.indentValue -= 1;
}
writeLine(line) {
for (let i = 0; i < this.indentValue; i += 1) {
this.textParts.push(this.indentText);
}
this.textParts.push(line);
this.textParts.push('\n');
}
getFullText() {
return this.textParts.join('');
}
}
// GENERATOR UTILITY FUNCTIONS
function compareName(x, y) {
if (x.name < y.name) {
return -1;
}
else if (x.name > y.name) {
return 1;
}
else {
return 0;
}
}
function isNamespaceBase(obj) {
return Array.isArray(obj.nestedArray);
}
function stripLeadingPeriod(name) {
return name.startsWith('.') ? name.substring(1) : name;
}
function getImportPath(to) {
/* If the thing we are importing is defined in a message, it is generated in
* the same file as that message. */
if (to.parent instanceof Protobuf.Type) {
return getImportPath(to.parent);
}
return stripLeadingPeriod(to.fullName).replace(/\./g, '/');
}
function getPath(to) {
return stripLeadingPeriod(to.fullName).replace(/\./g, '/') + '.ts';
}
function getPathToRoot(from) {
const depth = stripLeadingPeriod(from.fullName).split('.').length - 1;
if (depth === 0) {
return './';
}
let path = '';
for (let i = 0; i < depth; i++) {
path += '../';
}
return path;
}
function getRelativeImportPath(from, to) {
return getPathToRoot(from) + getImportPath(to);
}
function getTypeInterfaceName(type) {
return type.fullName.replace(/\./g, '_');
}
function getImportLine(dependency, from) {
const filePath = from === undefined ? './' + getImportPath(dependency) : getRelativeImportPath(from, dependency);
const typeInterfaceName = getTypeInterfaceName(dependency);
let importedTypes;
/* If the dependency is defined within a message, it will be generated in that
* message's file and exported using its typeInterfaceName. */
if (dependency.parent instanceof Protobuf.Type) {
if (dependency instanceof Protobuf.Type) {
importedTypes = `${typeInterfaceName}, ${typeInterfaceName}__Output`;
}
else if (dependency instanceof Protobuf.Enum) {
importedTypes = `${typeInterfaceName}`;
}
else if (dependency instanceof Protobuf.Service) {
importedTypes = `${typeInterfaceName}Client, ${typeInterfaceName}Definition`;
}
else {
throw new Error('Invalid object passed to getImportLine');
}
}
else {
if (dependency instanceof Protobuf.Type) {
importedTypes = `${dependency.name} as ${typeInterfaceName}, ${dependency.name}__Output as ${typeInterfaceName}__Output`;
}
else if (dependency instanceof Protobuf.Enum) {
importedTypes = `${dependency.name} as ${typeInterfaceName}`;
}
else if (dependency instanceof Protobuf.Service) {
importedTypes = `${dependency.name}Client as ${typeInterfaceName}Client, ${dependency.name}Definition as ${typeInterfaceName}Definition`;
}
else {
throw new Error('Invalid object passed to getImportLine');
}
}
return `import type { ${importedTypes} } from '${filePath}';`;
}
function getChildMessagesAndEnums(namespace) {
const messageList = [];
for (const nested of namespace.nestedArray) {
if (nested instanceof Protobuf.Type || nested instanceof Protobuf.Enum) {
messageList.push(nested);
}
if (isNamespaceBase(nested)) {
messageList.push(...getChildMessagesAndEnums(nested));
}
}
return messageList;
}
function formatComment(formatter, comment) {
if (!comment) {
return;
}
formatter.writeLine('/**');
for (const line of comment.split('\n')) {
formatter.writeLine(` * ${line.replace(/\*\//g, '* /')}`);
}
formatter.writeLine(' */');
}
// GENERATOR FUNCTIONS
function getTypeNamePermissive(fieldType, resolvedType, repeated, map) {
switch (fieldType) {
case 'double':
case 'float':
return 'number | string';
case 'int32':
case 'uint32':
case 'sint32':
case 'fixed32':
case 'sfixed32':
return 'number';
case 'int64':
case 'uint64':
case 'sint64':
case 'fixed64':
case 'sfixed64':
return 'number | string | Long';
case 'bool':
return 'boolean';
case 'string':
return 'string';
case 'bytes':
return 'Buffer | Uint8Array | string';
default:
if (resolvedType === null) {
throw new Error('Found field with no usable type');
}
const typeInterfaceName = getTypeInterfaceName(resolvedType);
if (resolvedType instanceof Protobuf.Type) {
if (repeated || map) {
return typeInterfaceName;
}
else {
return `${typeInterfaceName} | null`;
}
}
else {
return `${typeInterfaceName} | keyof typeof ${typeInterfaceName}`;
}
}
}
function getFieldTypePermissive(field) {
const valueType = getTypeNamePermissive(field.type, field.resolvedType, field.repeated, field.map);
if (field instanceof Protobuf.MapField) {
const keyType = field.keyType === 'string' ? 'string' : 'number';
return `{[key: ${keyType}]: ${valueType}}`;
}
else {
return valueType;
}
}
function generatePermissiveMessageInterface(formatter, messageType, options, nameOverride) {
if (options.includeComments) {
formatComment(formatter, messageType.comment);
}
if (messageType.fullName === '.google.protobuf.Any') {
/* This describes the behavior of the Protobuf.js Any wrapper fromObject
* replacement function */
formatter.writeLine('export type Any = AnyExtension | {');
formatter.writeLine(' type_url: string;');
formatter.writeLine(' value: Buffer | Uint8Array | string;');
formatter.writeLine('}');
return;
}
formatter.writeLine(`export interface ${nameOverride !== null && nameOverride !== void 0 ? nameOverride : messageType.name} {`);
formatter.indent();
for (const field of messageType.fieldsArray) {
const repeatedString = field.repeated ? '[]' : '';
const type = getFieldTypePermissive(field);
if (options.includeComments) {
formatComment(formatter, field.comment);
}
formatter.writeLine(`'${field.name}'?: (${type})${repeatedString};`);
}
for (const oneof of messageType.oneofsArray) {
const typeString = oneof.fieldsArray.map(field => `"${field.name}"`).join('|');
if (options.includeComments) {
formatComment(formatter, oneof.comment);
}
formatter.writeLine(`'${oneof.name}'?: ${typeString};`);
}
formatter.unindent();
formatter.writeLine('}');
}
function getTypeNameRestricted(fieldType, resolvedType, repeated, map, options) {
switch (fieldType) {
case 'double':
case 'float':
if (options.json) {
return 'number | string';
}
else {
return 'number';
}
case 'int32':
case 'uint32':
case 'sint32':
case 'fixed32':
case 'sfixed32':
return 'number';
case 'int64':
case 'uint64':
case 'sint64':
case 'fixed64':
case 'sfixed64':
if (options.longs === Number) {
return 'number';
}
else if (options.longs === String) {
return 'string';
}
else {
return 'Long';
}
case 'bool':
return 'boolean';
case 'string':
return 'string';
case 'bytes':
if (options.bytes === Array) {
return 'Uint8Array';
}
else if (options.bytes === String) {
return 'string';
}
else {
return 'Buffer';
}
default:
if (resolvedType === null) {
throw new Error('Found field with no usable type');
}
const typeInterfaceName = getTypeInterfaceName(resolvedType);
if (resolvedType instanceof Protobuf.Type) {
/* null is only used to represent absent message values if the defaults
* option is set, and only for non-repeated, non-map fields. */
if (options.defaults && !repeated && !map) {
return `${typeInterfaceName}__Output | null`;
}
else {
return `${typeInterfaceName}__Output`;
}
}
else {
if (options.enums == String) {
return `keyof typeof ${typeInterfaceName}`;
}
else {
return typeInterfaceName;
}
}
}
}
function getFieldTypeRestricted(field, options) {
const valueType = getTypeNameRestricted(field.type, field.resolvedType, field.repeated, field.map, options);
if (field instanceof Protobuf.MapField) {
const keyType = field.keyType === 'string' ? 'string' : 'number';
return `{[key: ${keyType}]: ${valueType}}`;
}
else {
return valueType;
}
}
function generateRestrictedMessageInterface(formatter, messageType, options, nameOverride) {
var _a, _b, _c;
if (options.includeComments) {
formatComment(formatter, messageType.comment);
}
if (messageType.fullName === '.google.protobuf.Any' && options.json) {
/* This describes the behavior of the Protobuf.js Any wrapper toObject
* replacement function */
let optionalString = options.defaults ? '' : '?';
formatter.writeLine('export type Any__Output = AnyExtension | {');
formatter.writeLine(` type_url${optionalString}: string;`);
formatter.writeLine(` value${optionalString}: ${getTypeNameRestricted('bytes', null, false, false, options)};`);
formatter.writeLine('}');
return;
}
formatter.writeLine(`export interface ${nameOverride !== null && nameOverride !== void 0 ? nameOverride : messageType.name}__Output {`);
formatter.indent();
for (const field of messageType.fieldsArray) {
let fieldGuaranteed;
if (field.partOf) {
// The field is not guaranteed populated if it is part of a oneof
fieldGuaranteed = false;
}
else if (field.repeated) {
fieldGuaranteed = (_a = (options.defaults || options.arrays)) !== null && _a !== void 0 ? _a : false;
}
else if (field.map) {
fieldGuaranteed = (_b = (options.defaults || options.objects)) !== null && _b !== void 0 ? _b : false;
}
else {
fieldGuaranteed = (_c = options.defaults) !== null && _c !== void 0 ? _c : false;
}
const optionalString = fieldGuaranteed ? '' : '?';
const repeatedString = field.repeated ? '[]' : '';
const type = getFieldTypeRestricted(field, options);
if (options.includeComments) {
formatComment(formatter, field.comment);
}
formatter.writeLine(`'${field.name}'${optionalString}: (${type})${repeatedString};`);
}
if (options.oneofs) {
for (const oneof of messageType.oneofsArray) {
const typeString = oneof.fieldsArray.map(field => `"${field.name}"`).join('|');
if (options.includeComments) {
formatComment(formatter, oneof.comment);
}
formatter.writeLine(`'${oneof.name}': ${typeString};`);
}
}
formatter.unindent();
formatter.writeLine('}');
}
function generateMessageInterfaces(formatter, messageType, options) {
var _a, _b;
let usesLong = false;
let seenDeps = new Set();
const childTypes = getChildMessagesAndEnums(messageType);
formatter.writeLine(`// Original file: ${(_b = ((_a = messageType.filename) !== null && _a !== void 0 ? _a : 'null')) === null || _b === void 0 ? void 0 : _b.replace(/\\/g, '/')}`);
formatter.writeLine('');
messageType.fieldsArray.sort((fieldA, fieldB) => fieldA.id - fieldB.id);
for (const field of messageType.fieldsArray) {
if (field.resolvedType && childTypes.indexOf(field.resolvedType) < 0) {
const dependency = field.resolvedType;
if (seenDeps.has(dependency.fullName)) {
continue;
}
seenDeps.add(dependency.fullName);
formatter.writeLine(getImportLine(dependency, messageType));
}
if (field.type.indexOf('64') >= 0) {
usesLong = true;
}
}
for (const childType of childTypes) {
if (childType instanceof Protobuf.Type) {
for (const field of childType.fieldsArray) {
if (field.resolvedType && childTypes.indexOf(field.resolvedType) < 0) {
const dependency = field.resolvedType;
if (seenDeps.has(dependency.fullName)) {
continue;
}
seenDeps.add(dependency.fullName);
formatter.writeLine(getImportLine(dependency, messageType));
}
if (field.type.indexOf('64') >= 0) {
usesLong = true;
}
}
}
}
if (usesLong) {
formatter.writeLine("import type { Long } from '@grpc/proto-loader';");
}
if (messageType.fullName === '.google.protobuf.Any') {
formatter.writeLine("import type { AnyExtension } from '@grpc/proto-loader';");
}
formatter.writeLine('');
for (const childType of childTypes.sort(compareName)) {
const nameOverride = getTypeInterfaceName(childType);
if (childType instanceof Protobuf.Type) {
generatePermissiveMessageInterface(formatter, childType, options, nameOverride);
formatter.writeLine('');
generateRestrictedMessageInterface(formatter, childType, options, nameOverride);
}
else {
generateEnumInterface(formatter, childType, options, nameOverride);
}
formatter.writeLine('');
}
generatePermissiveMessageInterface(formatter, messageType, options);
formatter.writeLine('');
generateRestrictedMessageInterface(formatter, messageType, options);
}
function generateEnumInterface(formatter, enumType, options, nameOverride) {
var _a, _b;
formatter.writeLine(`// Original file: ${(_b = ((_a = enumType.filename) !== null && _a !== void 0 ? _a : 'null')) === null || _b === void 0 ? void 0 : _b.replace(/\\/g, '/')}`);
formatter.writeLine('');
if (options.includeComments) {
formatComment(formatter, enumType.comment);
}
formatter.writeLine(`export enum ${nameOverride !== null && nameOverride !== void 0 ? nameOverride : enumType.name} {`);
formatter.indent();
for (const key of Object.keys(enumType.values)) {
if (options.includeComments) {
formatComment(formatter, enumType.comments[key]);
}
formatter.writeLine(`${key} = ${enumType.values[key]},`);
}
formatter.unindent();
formatter.writeLine('}');
}
/**
 * This is a list of methods that exist in the generic Client class in the
* gRPC libraries. TypeScript has a problem with methods in subclasses with the
* same names as methods in the superclass, but with mismatched APIs. So, we
* avoid generating methods with these names in the service client interfaces.
* We always generate two service client methods per service method: one camel
* cased, and one with the original casing. So we will still generate one
* service client method for any conflicting name.
*
 * Technically, at runtime a conflicting name in the service client method
* actually shadows the original method, but TypeScript does not have a good
* way to represent that. So this change is not 100% accurate, but it gets the
* generated code to compile.
*
* This is just a list of the methods in the Client class definitions in
* grpc@1.24.11 and @grpc/grpc-js@1.4.0.
*/
const CLIENT_RESERVED_METHOD_NAMES = new Set([
'close',
'getChannel',
'waitForReady',
'makeUnaryRequest',
'makeClientStreamRequest',
'makeServerStreamRequest',
'makeBidiStreamRequest',
'resolveCallInterceptors',
/* These methods are private, but TypeScript is not happy with overriding even
* private methods with mismatched APIs. */
'checkOptionalUnaryResponseArguments',
'checkMetadataAndOptions'
]);
function generateServiceClientInterface(formatter, serviceType, options) {
if (options.includeComments) {
formatComment(formatter, serviceType.comment);
}
formatter.writeLine(`export interface ${serviceType.name}Client extends grpc.Client {`);
formatter.indent();
for (const methodName of Object.keys(serviceType.methods).sort()) {
const method = serviceType.methods[methodName];
for (const name of [methodName, camelCase(methodName)]) {
if (CLIENT_RESERVED_METHOD_NAMES.has(name)) {
continue;
}
if (options.includeComments) {
formatComment(formatter, method.comment);
}
const requestType = getTypeInterfaceName(method.resolvedRequestType);
const responseType = getTypeInterfaceName(method.resolvedResponseType) + '__Output';
const callbackType = `grpc.requestCallback<${responseType}>`;
if (method.requestStream) {
if (method.responseStream) {
// Bidi streaming
const callType = `grpc.ClientDuplexStream<${requestType}, ${responseType}>`;
formatter.writeLine(`${name}(metadata: grpc.Metadata, options?: grpc.CallOptions): ${callType};`);
formatter.writeLine(`${name}(options?: grpc.CallOptions): ${callType};`);
}
else {
// Client streaming
const callType = `grpc.ClientWritableStream<${requestType}>`;
formatter.writeLine(`${name}(metadata: grpc.Metadata, options: grpc.CallOptions, callback: ${callbackType}): ${callType};`);
formatter.writeLine(`${name}(metadata: grpc.Metadata, callback: ${callbackType}): ${callType};`);
formatter.writeLine(`${name}(options: grpc.CallOptions, callback: ${callbackType}): ${callType};`);
formatter.writeLine(`${name}(callback: ${callbackType}): ${callType};`);
}
}
else {
if (method.responseStream) {
// Server streaming
const callType = `grpc.ClientReadableStream<${responseType}>`;
formatter.writeLine(`${name}(argument: ${requestType}, metadata: grpc.Metadata, options?: grpc.CallOptions): ${callType};`);
formatter.writeLine(`${name}(argument: ${requestType}, options?: grpc.CallOptions): ${callType};`);
}
else {
// Unary
const callType = 'grpc.ClientUnaryCall';
formatter.writeLine(`${name}(argument: ${requestType}, metadata: grpc.Metadata, options: grpc.CallOptions, callback: ${callbackType}): ${callType};`);
formatter.writeLine(`${name}(argument: ${requestType}, metadata: grpc.Metadata, callback: ${callbackType}): ${callType};`);
formatter.writeLine(`${name}(argument: ${requestType}, options: grpc.CallOptions, callback: ${callbackType}): ${callType};`);
formatter.writeLine(`${name}(argument: ${requestType}, callback: ${callbackType}): ${callType};`);
}
}
}
formatter.writeLine('');
}
formatter.unindent();
formatter.writeLine('}');
}
function generateServiceHandlerInterface(formatter, serviceType, options) {
if (options.includeComments) {
formatComment(formatter, serviceType.comment);
}
formatter.writeLine(`export interface ${serviceType.name}Handlers extends grpc.UntypedServiceImplementation {`);
formatter.indent();
for (const methodName of Object.keys(serviceType.methods).sort()) {
const method = serviceType.methods[methodName];
if (options.includeComments) {
formatComment(formatter, method.comment);
}
const requestType = getTypeInterfaceName(method.resolvedRequestType) + '__Output';
const responseType = getTypeInterfaceName(method.resolvedResponseType);
if (method.requestStream) {
if (method.responseStream) {
// Bidi streaming
formatter.writeLine(`${methodName}: grpc.handleBidiStreamingCall<${requestType}, ${responseType}>;`);
}
else {
// Client streaming
formatter.writeLine(`${methodName}: grpc.handleClientStreamingCall<${requestType}, ${responseType}>;`);
}
}
else {
if (method.responseStream) {
// Server streaming
formatter.writeLine(`${methodName}: grpc.handleServerStreamingCall<${requestType}, ${responseType}>;`);
}
else {
// Unary
formatter.writeLine(`${methodName}: grpc.handleUnaryCall<${requestType}, ${responseType}>;`);
}
}
formatter.writeLine('');
}
formatter.unindent();
formatter.writeLine('}');
}
function generateServiceDefinitionInterface(formatter, serviceType) {
formatter.writeLine(`export interface ${serviceType.name}Definition extends grpc.ServiceDefinition {`);
formatter.indent();
for (const methodName of Object.keys(serviceType.methods).sort()) {
const method = serviceType.methods[methodName];
const requestType = getTypeInterfaceName(method.resolvedRequestType);
const responseType = getTypeInterfaceName(method.resolvedResponseType);
formatter.writeLine(`${methodName}: MethodDefinition<${requestType}, ${responseType}, ${requestType}__Output, ${responseType}__Output>`);
}
formatter.unindent();
formatter.writeLine('}');
}
function generateServiceInterfaces(formatter, serviceType, options) {
var _a, _b;
formatter.writeLine(`// Original file: ${(_b = ((_a = serviceType.filename) !== null && _a !== void 0 ? _a : 'null')) === null || _b === void 0 ? void 0 : _b.replace(/\\/g, '/')}`);
formatter.writeLine('');
const grpcImportPath = options.grpcLib.startsWith('.') ? getPathToRoot(serviceType) + options.grpcLib : options.grpcLib;
formatter.writeLine(`import type * as grpc from '${grpcImportPath}'`);
formatter.writeLine(`import type { MethodDefinition } from '@grpc/proto-loader'`);
const dependencies = new Set();
for (const method of serviceType.methodsArray) {
dependencies.add(method.resolvedRequestType);
dependencies.add(method.resolvedResponseType);
}
for (const dep of Array.from(dependencies.values()).sort(compareName)) {
formatter.writeLine(getImportLine(dep, serviceType));
}
formatter.writeLine('');
generateServiceClientInterface(formatter, serviceType, options);
formatter.writeLine('');
generateServiceHandlerInterface(formatter, serviceType, options);
formatter.writeLine('');
generateServiceDefinitionInterface(formatter, serviceType);
}
function containsDefinition(definitionType, namespace) {
for (const nested of namespace.nestedArray.sort(compareName)) {
if (nested instanceof definitionType) {
return true;
}
else if (isNamespaceBase(nested) && !(nested instanceof Protobuf.Type) && !(nested instanceof Protobuf.Enum) && containsDefinition(definitionType, nested)) {
return true;
}
}
return false;
}
function generateDefinitionImports(formatter, namespace, options) {
const imports = [];
if (containsDefinition(Protobuf.Enum, namespace)) {
imports.push('EnumTypeDefinition');
}
if (containsDefinition(Protobuf.Type, namespace)) {
imports.push('MessageTypeDefinition');
}
if (imports.length) {
formatter.writeLine(`import type { ${imports.join(', ')} } from '@grpc/proto-loader';`);
}
}
function generateServiceImports(formatter, namespace, options) {
for (const nested of namespace.nestedArray.sort(compareName)) {
if (nested instanceof Protobuf.Service) {
formatter.writeLine(getImportLine(nested));
}
else if (isNamespaceBase(nested) && !(nested instanceof Protobuf.Type) && !(nested instanceof Protobuf.Enum)) {
generateServiceImports(formatter, nested, options);
}
}
}
function generateSingleLoadedDefinitionType(formatter, nested, options) {
if (nested instanceof Protobuf.Service) {
if (options.includeComments) {
formatComment(formatter, nested.comment);
}
const typeInterfaceName = getTypeInterfaceName(nested);
formatter.writeLine(`${nested.name}: SubtypeConstructor<typeof grpc.Client, ${typeInterfaceName}Client> & { service: ${typeInterfaceName}Definition }`);
}
else if (nested instanceof Protobuf.Enum) {
formatter.writeLine(`${nested.name}: EnumTypeDefinition`);
}
else if (nested instanceof Protobuf.Type) {
formatter.writeLine(`${nested.name}: MessageTypeDefinition`);
}
else if (isNamespaceBase(nested)) {
generateLoadedDefinitionTypes(formatter, nested, options);
}
}
function generateLoadedDefinitionTypes(formatter, namespace, options) {
formatter.writeLine(`${namespace.name}: {`);
formatter.indent();
for (const nested of namespace.nestedArray.sort(compareName)) {
generateSingleLoadedDefinitionType(formatter, nested, options);
}
formatter.unindent();
formatter.writeLine('}');
}
function generateRootFile(formatter, root, options) {
formatter.writeLine(`import type * as grpc from '${options.grpcLib}';`);
generateDefinitionImports(formatter, root, options);
formatter.writeLine('');
generateServiceImports(formatter, root, options);
formatter.writeLine('');
formatter.writeLine('type SubtypeConstructor<Constructor extends new (...args: any) => any, Subtype> = {');
formatter.writeLine(' new(...args: ConstructorParameters<Constructor>): Subtype;');
formatter.writeLine('};');
formatter.writeLine('');
formatter.writeLine('export interface ProtoGrpcType {');
formatter.indent();
for (const nested of root.nestedArray) {
generateSingleLoadedDefinitionType(formatter, nested, options);
}
formatter.unindent();
formatter.writeLine('}');
formatter.writeLine('');
}
async function writeFile(filename, contents) {
await fs.promises.mkdir(path.dirname(filename), { recursive: true });
return fs.promises.writeFile(filename, contents);
}
function generateFilesForNamespace(namespace, options) {
const filePromises = [];
for (const nested of namespace.nestedArray) {
const fileFormatter = new TextFormatter();
if (nested instanceof Protobuf.Type) {
generateMessageInterfaces(fileFormatter, nested, options);
if (options.verbose) {
console.log(`Writing ${options.outDir}/${getPath(nested)} from file ${nested.filename}`);
}
filePromises.push(writeFile(`${options.outDir}/${getPath(nested)}`, fileFormatter.getFullText()));
}
else if (nested instanceof Protobuf.Enum) {
generateEnumInterface(fileFormatter, nested, options);
if (options.verbose) {
console.log(`Writing ${options.outDir}/${getPath(nested)} from file ${nested.filename}`);
}
filePromises.push(writeFile(`${options.outDir}/${getPath(nested)}`, fileFormatter.getFullText()));
}
else if (nested instanceof Protobuf.Service) {
generateServiceInterfaces(fileFormatter, nested, options);
if (options.verbose) {
console.log(`Writing ${options.outDir}/${getPath(nested)} from file ${nested.filename}`);
}
filePromises.push(writeFile(`${options.outDir}/${getPath(nested)}`, fileFormatter.getFullText()));
}
else if (isNamespaceBase(nested)) {
filePromises.push(...generateFilesForNamespace(nested, options));
}
}
return filePromises;
}
function writeFilesForRoot(root, masterFileName, options) {
const filePromises = [];
const masterFileFormatter = new TextFormatter();
generateRootFile(masterFileFormatter, root, options);
if (options.verbose) {
console.log(`Writing ${options.outDir}/${masterFileName}`);
}
filePromises.push(writeFile(`${options.outDir}/${masterFileName}`, masterFileFormatter.getFullText()));
filePromises.push(...generateFilesForNamespace(root, options));
return filePromises;
}
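// Creates the output directory, groups input .proto files by output basename, loads each group,
// and generates its files.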
async function writeAllFiles(protoFiles, options) {
await fs.promises.mkdir(options.outDir, { recursive: true });
const basenameMap = new Map();
for (const filename of protoFiles) {
const basename = path.basename(filename).replace(/\.proto$/, '.ts');
if (basenameMap.has(basename)) {
basenameMap.get(basename).push(filename);
}
else {
basenameMap.set(basename, [filename]);
}
}
for (const [basename, filenames] of basenameMap.entries()) {
const loadedRoot = await util_1.loadProtosWithOptions(filenames, options);
// Wait for every generated file to finish writing so write errors propagate to the caller.
await Promise.all(writeFilesForRoot(loadedRoot, basename, options));
}
}
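// CLI entry point: parses options with yargs and generates type files for each positional .proto filename.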
async function runScript() {
const argv = yargs
.parserConfiguration({
'parse-positional-numbers': false
})
.string(['includeDirs', 'grpcLib'])
.normalize(['includeDirs', 'outDir'])
.array('includeDirs')
.boolean(['keepCase', 'defaults', 'arrays', 'objects', 'oneofs', 'json', 'verbose', 'includeComments'])
.string(['longs', 'enums', 'bytes'])
.default('keepCase', false)
.default('defaults', false)
.default('arrays', false)
.default('objects', false)
.default('oneofs', false)
.default('json', false)
.default('includeComments', false)
.default('longs', 'Long')
.default('enums', 'number')
.default('bytes', 'Buffer')
.coerce('longs', value => {
switch (value) {
case 'String': return String;
case 'Number': return Number;
default: return undefined;
}
}).coerce('enums', value => {
if (value === 'String') {
return String;
}
else {
return undefined;
}
}).coerce('bytes', value => {
switch (value) {
case 'Array': return Array;
case 'String': return String;
default: return undefined;
}
}).alias({
includeDirs: 'I',
outDir: 'O',
verbose: 'v'
}).describe({
keepCase: 'Preserve the case of field names',
longs: 'The type that should be used to output 64 bit integer values. Can be String, Number',
enums: 'The type that should be used to output enum fields. Can be String',
bytes: 'The type that should be used to output bytes fields. Can be String, Array',
defaults: 'Output default values for omitted fields',
arrays: 'Output default values for omitted repeated fields even if --defaults is not set',
objects: 'Output default values for omitted message fields even if --defaults is not set',
oneofs: 'Output virtual oneof fields set to the present field\'s name',
json: 'Represent Infinity and NaN as strings in float fields. Also decode google.protobuf.Any automatically',
includeComments: 'Generate doc comments from comments in the original files',
includeDirs: 'Directories to search for included files',
outDir: 'Directory in which to output files',
grpcLib: 'The gRPC implementation library that these types will be used with'
}).demandOption(['outDir', 'grpcLib'])
.demand(1)
.usage('$0 [options] filenames...')
.epilogue('WARNING: This tool is in alpha. The CLI and generated code are subject to change')
.argv;
if (argv.verbose) {
console.log('Parsed arguments:', argv);
}
util_1.addCommonProtos();
writeAllFiles(argv._, Object.assign(Object.assign({}, argv), { alternateCommentMode: true })).then(() => {
if (argv.verbose) {
console.log('Success');
}
}, (error) => {
console.error(error);
process.exit(1);
});
}
if (require.main === module) {
runScript();
}
//# sourceMappingURL=proto-loader-gen-types.js.map
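
For orientation, below is a minimal sketch of how output from this generator is typically consumed at runtime; the proto filename, package name, service name, and address are illustrative assumptions, not files from this repository.

// Hypothetical consumer of the generated types; 'example.proto' and 'example.Greeter' are assumed names.
const grpc = require('@grpc/grpc-js');
const protoLoader = require('@grpc/proto-loader');
// Load the same .proto file the generator was pointed at, with matching conversion options.
const packageDefinition = protoLoader.loadSync('example.proto', {
  keepCase: false,
  longs: String,
  defaults: true,
  oneofs: true
});
// In TypeScript this object would be annotated with the generated ProtoGrpcType interface;
// each service entry is a client constructor, as emitted by generateSingleLoadedDefinitionType above.
const proto = grpc.loadPackageDefinition(packageDefinition);
const client = new proto.example.Greeter('localhost:50051', grpc.credentials.createInsecure());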

View File

@ -1,159 +0,0 @@
#!/usr/bin/env node
var yargs = require('yargs')
var qr = require('../lib')
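// Renders the QR code to a file, logging the error and exiting non-zero on failure.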
function save (file, text, options) {
qr.toFile(file, text, options, function (err, data) {
if (err) {
console.error('Error:', err.message)
process.exit(1)
}
console.log('saved qrcode to: ' + file + '\n')
})
}
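// Renders the QR code as terminal-friendly text and prints it.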
function print (text, options) {
options.type = 'terminal'
qr.toString(text, options, function (err, text) {
if (err) {
console.error('Error:', err.message)
process.exit(1)
}
console.log(text)
})
}
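// Maps the parsed CLI flags onto the options object the qrcode library expects.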
function parseOptions (args) {
return {
version: args.qversion,
errorCorrectionLevel: args.error,
type: args.type,
small: !!args.small,
inverse: !!args.inverse,
maskPattern: args.mask,
margin: args.qzone,
width: args.width,
scale: args.scale,
color: {
light: args.lightcolor,
dark: args.darkcolor
}
}
}
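// Shows help and exits when no input text is given; otherwise saves to the output file or prints to the terminal.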
function processInputs (text, opts) {
if (!text.length) {
yargs.showHelp()
process.exit(1)
}
if (opts.output) {
save(opts.output, text, parseOptions(opts))
} else {
print(text, parseOptions(opts))
}
}
var argv = yargs
.detectLocale(false)
.usage('Usage: $0 [options] <input string>')
.option('v', {
alias: 'qversion',
description: 'QR Code symbol version (1 - 40)',
group: 'QR Code options:',
type: 'number'
})
.option('e', {
alias: 'error',
description: 'Error correction level',
choices: ['L', 'M', 'Q', 'H'],
group: 'QR Code options:'
})
.option('m', {
alias: 'mask',
description: 'Mask pattern (0 - 7)',
group: 'QR Code options:',
type: 'number'
})
.option('t', {
alias: 'type',
description: 'Output type',
choices: ['png', 'svg', 'utf8'],
implies: 'output',
group: 'Renderer options:'
})
.option('i', {
alias: 'inverse',
type: 'boolean',
description: 'Invert colors',
group: 'Renderer options:'
})
.option('w', {
alias: 'width',
description: 'Image width (px)',
conflicts: 'scale',
group: 'Renderer options:',
type: 'number'
})
.option('s', {
alias: 'scale',
description: 'Scale factor',
conflicts: 'width',
group: 'Renderer options:',
type: 'number'
})
.option('q', {
alias: 'qzone',
description: 'Quiet zone size',
group: 'Renderer options:',
type: 'number'
})
.option('l', {
alias: 'lightcolor',
description: 'Light RGBA hex color',
group: 'Renderer options:'
})
.option('d', {
alias: 'darkcolor',
description: 'Dark RGBA hex color',
group: 'Renderer options:'
})
.option('small', {
type: 'boolean',
description: 'Output smaller QR code to terminal',
conflicts: 'type',
group: 'Renderer options:'
})
.option('o', {
alias: 'output',
description: 'Output file'
})
.help('h')
.alias('h', 'help')
.version()
.example('$0 "some text"', 'Draw in terminal window')
.example('$0 -o out.png "some text"', 'Save as png image')
.example('$0 -d F00 -o out.png "some text"', 'Use red as foreground color')
.parserConfiguration({'parse-numbers': false})
.argv
if (process.stdin.isTTY) {
processInputs(argv._.join(' '), argv)
} else {
var text = ''
process.stdin.setEncoding('utf8')
process.stdin.on('readable', function () {
var chunk = process.stdin.read()
if (chunk !== null) {
text += chunk
}
})
process.stdin.on('end', function () {
// This process can be run outside of a TTY, so if no data arrived on stdin,
// fall back to reading the input from argv.
processInputs(text.length ? text : argv._.join(' '), argv)
})
}
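
The CLI above is a thin wrapper around the library's toString and toFile helpers; a minimal sketch of calling them directly follows, requiring the published qrcode package rather than the local ../lib path. The text and output path are illustrative assumptions.

// Sketch of the underlying qrcode API the CLI delegates to; 'out.png' is an assumed path.
const qr = require('qrcode');
// Render to the terminal, as print() does above.
qr.toString('some text', { type: 'terminal' }, function (err, output) {
  if (err) throw err;
  console.log(output);
});
// Write a PNG file, as save() does above.
qr.toFile('out.png', 'some text', { errorCorrectionLevel: 'M' }, function (err) {
  if (err) throw err;
  console.log('saved qrcode to: out.png');
});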

View File

@ -1,4 +0,0 @@
#! /usr/bin/env node
var rc = require('./index')
console.log(JSON.stringify(rc(process.argv[2]), false, 2))
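
This three-line CLI simply prints whatever configuration rc resolves for the given app name; a minimal sketch of the equivalent library call follows. The app name and defaults object are illustrative assumptions.

// Sketch of the rc lookup performed above; 'myapp' and the defaults are assumed.
const rc = require('rc');
// Merges the defaults with values from config files (e.g. .myapprc), environment variables
// prefixed with myapp_, and command-line flags, in rc's usual precedence order.
const conf = rc('myapp', { port: 3000 });
console.log(JSON.stringify(conf, null, 2));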

View File

@ -1,17 +0,0 @@
@ECHO off
GOTO start
:find_dp0
SET dp0=%~dp0
EXIT /b
:start
SETLOCAL
CALL :find_dp0
IF EXIST "%dp0%\node.exe" (
SET "_prog=%dp0%\node.exe"
) ELSE (
SET "_prog=node"
SET PATHEXT=%PATHEXT:;.JS;=;%
)
endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\rc\cli.js" %*

View File

@ -1,28 +0,0 @@
#!/usr/bin/env pwsh
$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
$exe=""
if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
# Fix case when both the Windows and Linux builds of Node
# are installed in the same directory
$exe=".exe"
}
$ret=0
if (Test-Path "$basedir/node$exe") {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "$basedir/node$exe" "$basedir/../rc/cli.js" $args
} else {
& "$basedir/node$exe" "$basedir/../rc/cli.js" $args
}
$ret=$LASTEXITCODE
} else {
# Support pipeline input
if ($MyInvocation.ExpectingInput) {
$input | & "node$exe" "$basedir/../rc/cli.js" $args
} else {
& "node$exe" "$basedir/../rc/cli.js" $args
}
$ret=$LASTEXITCODE
}
exit $ret

Some files were not shown because too many files have changed in this diff