// Copyright 2016-2018 AJ ONeal. All rights reserved
// https://git.coolaj86.com/coolaj86/asn1-parser.js
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
;(function (exports) {
'use strict';
if (!exports.ASN1) { exports.ASN1 = {}; }
if (!exports.Enc) { exports.Enc = {}; }
if (!exports.PEM) { exports.PEM = {}; }
var ASN1 = exports.ASN1;
var Enc = exports.Enc;
var PEM = exports.PEM;
//
// Parser
//
// I've only seen 9 max in https certificates themselves,
// but each domain list could have up to 100
ASN1.ELOOPN = 102;
ASN1.ELOOP = "uASN1.js Error: iterated over " + ASN1.ELOOPN + "+ elements (probably a malformed file)";
// I've seen https certificates go 29 deep
ASN1.EDEEPN = 60;
ASN1.EDEEP = "uASN1.js Error: element nested " + ASN1.EDEEPN + "+ layers deep (probably a malformed file)";
// Container Types are Sequence 0x30, Container Array? (0xA0, 0xA1)
// Value Types are Boolean 0x01, Integer 0x02, Null 0x05, Object ID 0x06, String (0x0C, 0x16, 0x13, 0x1e), Value Array? (0x82)
// Bit String (0x03) and Octet String (0x04) may be values or containers
// Sometimes Bit String is used as a container (RSA Pub Spki)
ASN1.CTYPES = [ 0x30, 0x31, 0xa0, 0xa1 ];
ASN1.VTYPES = [ 0x01, 0x02, 0x05, 0x06, 0x0c, 0x82 ];
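// e.g. a SEQUENCE (0x30) is always parsed into children, an Object ID (0x06) is
// always kept as a raw value, and an Octet String (0x04), being in neither list,
// falls through to the try-children-then-value path below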
ASN1.parse = function parseAsn1Helper(buf) {
//var ws = ' ';
function parseAsn1(buf, depth, eager) {
if (depth.length >= ASN1.EDEEPN) { throw new Error(ASN1.EDEEP); }
var index = 2; // we know, at minimum, data starts after type (0) and lengthSize (1)
var asn1 = { type: buf[0], lengthSize: 0, length: buf[1] };
var child;
var iters = 0;
var adjust = 0;
var adjustedLen;
// Determine how many bytes the length uses, and what it is
if (0x80 & asn1.length) {
asn1.lengthSize = 0x7f & asn1.length;
// DER multi-byte lengths are big-endian, so buf -> hex -> parseInt reads them correctly
asn1.length = parseInt(Enc.bufToHex(buf.slice(index, index + asn1.lengthSize)), 16);
index += asn1.lengthSize;
}
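// e.g. (illustration, not from the original) a first length byte of 0x82 means
// "the next 2 bytes hold the length", so [ 0x30, 0x82, 0x05, 0x39, ... ]
// parses as lengthSize = 2 and length = 0x0539 = 1337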
// Integers with the high-order bit set get a leading 0x00 to signify that they are positive.
// Bit Strings use the first byte to signify padding, which x.509 doesn't use.
if (0x00 === buf[index] && (0x02 === asn1.type || 0x03 === asn1.type)) {
// However, 0x00 on its own is a valid number
if (asn1.length > 1) {
index += 1;
adjust = -1;
}
}
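// e.g. (illustration) DER encodes the unsigned integer 0x80 as 02 02 00 80;
// the leading 0x00 is skipped here, leaving a one-byte value of 0x80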
adjustedLen = asn1.length + adjust;
//console.warn(depth.join(ws) + '0x' + Enc.numToHex(asn1.type), index, 'len:', asn1.length, asn1);
function parseChildren(eager) {
asn1.children = [];
//console.warn('1 len:', (2 + asn1.lengthSize + asn1.length), 'idx:', index, 'clen:', 0);
while (iters < ASN1.ELOOPN && index < (2 + asn1.length + asn1.lengthSize)) {
iters += 1;
depth.length += 1;
child = parseAsn1(buf.slice(index, index + adjustedLen), depth, eager);
depth.length -= 1;
// The numbers don't match up exactly and I don't remember why...
// probably something with adjustedLen or some such, but the tests pass
index += (2 + child.lengthSize + child.length);
//console.warn('2 len:', (2 + asn1.lengthSize + asn1.length), 'idx:', index, 'clen:', (2 + child.lengthSize + child.length));
if (index > (2 + asn1.lengthSize + asn1.length)) {
if (!eager) { console.error(JSON.stringify(asn1, ASN1._replacer, 2)); }
throw new Error("Parse error: child value length (" + child.length
+ ") is greater than remaining parent length (" + (asn1.length - index)
+ " = " + asn1.length + " - " + index + ")");
}
asn1.children.push(child);
//console.warn(depth.join(ws) + '0x' + Enc.numToHex(asn1.type), index, 'len:', asn1.length, asn1);
}
if (index !== (2 + asn1.lengthSize + asn1.length)) {
//console.warn('index:', index, 'length:', (2 + asn1.lengthSize + asn1.length));
throw new Error("premature end-of-file");
}
if (iters >= ASN1.ELOOPN) { throw new Error(ASN1.ELOOP); }
delete asn1.value;
return asn1;
}
// Recurse into types that are _always_ containers
if (-1 !== ASN1.CTYPES.indexOf(asn1.type)) { return parseChildren(eager); }
// Return types that are _always_ values
asn1.value = buf.slice(index, index + adjustedLen);
if (-1 !== ASN1.VTYPES.indexOf(asn1.type)) { return asn1; }
// For ambiguous / unknown types, try to recurse; on failure, return as a value
// (and reset the child array length to zero)
try { return parseChildren(true); }
catch(e) { asn1.children.length = 0; return asn1; }
}
var asn1 = parseAsn1(buf, []);
var len = buf.byteLength || buf.length;
if (len !== 2 + asn1.lengthSize + asn1.length) {
throw new Error("Length of buffer does not match length of ASN.1 sequence.");
}
return asn1;
};
ASN1._replacer = function (k, v) {
if ('type' === k) { return '0x' + Enc.numToHex(v); }
if (v && 'value' === k) { return '0x' + Enc.bufToHex(v.data || v); }
return v;
};
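// e.g. JSON.stringify(asn1, ASN1._replacer, 2) (as used in the error path above)
// prints type as '0x30' and value as a hex string rather than a raw byte array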
// don't replace the full parseBlock, if it exists
PEM.parseBlock = PEM.parseBlock || function (str) {
var b64 = str.split(/\n/).filter(function (line) {
return !/-----/.test(line);
}).join('');
var der = Enc.base64ToBuf(b64);
return { bytes: der };
};
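// Example usage (a sketch; `pem` is assumed to be a PEM-encoded certificate string):
//   var der = PEM.parseBlock(pem).bytes;  // strips the -----BEGIN/END----- lines, decodes base64
//   var root = ASN1.parse(der);           // { type: 0x30, lengthSize, length, children: [ ... ] }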
Enc.base64ToBuf = function (b64) {
return Buffer.from(b64, 'base64');
};
Enc.binToBuf = function (bin) {
return Buffer.from(bin, 'binary');
};
Enc.bufToHex = function (u8) {
return Buffer.from(u8).toString('hex');
};
Enc.numToHex = function (d) {
d = d.toString(16);
if (d.length % 2) {
return '0' + d;
}
return d;
};
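// e.g. Enc.numToHex(0xa0) === 'a0' and Enc.numToHex(0x539) === '0539' (padded to whole bytes)
// Note: these Enc helpers assume Node's Buffer; a browser build would need
// Uint8Array/atob-based equivalents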
}('undefined' !== typeof window ? window : module.exports));