From: Matt Corallo
Date: Thu, 27 Jul 2023 20:34:12 +0000 (+0000)
Subject: Add handling for manual `TxIn` and stop trying to clone `Witness`
X-Git-Tag: v0.0.116.0^2~9
X-Git-Url: http://git.bitcoin.ninja/index.cgi?a=commitdiff_plain;h=1d0dc3d656f04efcdf155eadfa8c029fe0103c99;p=ldk-java

Add handling for manual `TxIn` and stop trying to clone `Witness`
---

diff --git a/gen_type_mapping.py b/gen_type_mapping.py
index 2a709ea4..2dfdd641 100644
--- a/gen_type_mapping.py
+++ b/gen_type_mapping.py
@@ -500,7 +500,7 @@ class TypeMappingGenerator:
                         to_hu_conv = self.consts.var_decl_statement(ty_info.java_hu_ty, ty_info.var_name + "_hu_conv", "new " + ty_info.java_hu_ty + "(null, " + ty_info.var_name + ")") + ";" + to_hu_conv_sfx,
                         to_hu_conv_name = ty_info.var_name + "_hu_conv", from_hu_conv = from_hu_conv)

-        # The manually-defined types - TxOut, BigEndianScalar, U5, and Error
+        # The manually-defined types - TxIn, TxOut, BigEndianScalar, U5, and Error
         if ty_info.rust_obj == "LDKError":
             assert from_hu_conv is None
             return ConvInfo(ty_info = ty_info, arg_name = ty_info.var_name,
@@ -517,7 +517,7 @@ class TypeMappingGenerator:
                 to_hu_conv = self.consts.var_decl_statement(ty_info.java_hu_ty, ty_info.var_name + "_conv", "new " + ty_info.java_hu_ty + "(" + ty_info.var_name + ")") + ";",
                 to_hu_conv_name = ty_info.var_name + "_conv", from_hu_conv = (ty_info.var_name + ".getVal()", ""))

-        assert ty_info.rust_obj == "LDKTxOut" or ty_info.rust_obj == "LDKBigEndianScalar"
+        assert ty_info.rust_obj == "LDKTxOut" or ty_info.rust_obj == "LDKTxIn" or ty_info.rust_obj == "LDKBigEndianScalar"
         if not ty_info.is_ptr and not holds_ref:
             ret_conv = (ty_info.rust_obj + "* " + ty_info.var_name + "_ref = MALLOC(sizeof(" + ty_info.rust_obj + "), \"" + ty_info.rust_obj + "\");\n*" + ty_info.var_name + "_ref = ", ";")
             ret_conv_name = "tag_ptr(" + ty_info.var_name + "_ref, true)"
diff --git a/genbindings.py b/genbindings.py
index 28b0e7e7..c11cd823 100755
--- a/genbindings.py
+++ b/genbindings.py
@@ -231,7 +231,10 @@ def java_c_types(fn_arg, ret_arr_len):
         assert var_is_arr_regex.match(fn_arg[8:])
         arr_access = "data"
     elif fn_arg.startswith("LDKWitness ") or fn_arg == "LDKWitness":
-        fn_arg = "uint8_t (*" + fn_arg[11:] + ")[datalen]"
+        if len(fn_arg) > 12 and fn_arg[11] == "*":
+            fn_arg = "uint8_t (" + fn_arg[11:] + ")[datalen]"
+        else:
+            fn_arg = "uint8_t (*" + fn_arg[11:] + ")[datalen]"
         rust_obj = "LDKWitness"
         assert var_is_arr_regex.match(fn_arg[8:])
         arr_access = "data"
@@ -575,6 +578,9 @@ with open(sys.argv[1]) as in_h, open(f"{sys.argv[2]}/bindings{consts.file_ext}",
             return_type_info = type_mapping_generator.map_type(method_return_type.strip() + " ret", True, ret_arr_len, False, force_holds_ref)

         if method_name.endswith("_clone") and expected_struct not in unitary_enums:
+            # LDKWitness is mapped as an array, so no need to implement clone
+            if expected_struct == "LDKWitness":
+                return
             meth_line = "uint64_t " + expected_struct.replace("LDK", "") + "_clone_ptr(" + expected_struct + " *NONNULL_PTR arg)"
             write_c("static inline " + meth_line + " {\n")
             write_c("\t" + return_type_info.ret_conv[0].replace("\n", "\n\t"))
@@ -641,8 +647,8 @@ with open(sys.argv[1]) as in_h, open(f"{sys.argv[2]}/bindings{consts.file_ext}",
             expected_struct in complex_enums or expected_struct in complex_enums or
             expected_struct in result_types or expected_struct in tuple_types) and not is_free
         impl_on_utils = not impl_on_struct and (not is_free and not method_name.endswith("_clone") and
-            not method_name.startswith("TxOut") and not method_name.startswith("BigEndianScalar") and
-            not method_name.startswith("_") and
+            not method_name.startswith("TxOut") and not method_name.startswith("TxIn") and
+            not method_name.startswith("BigEndianScalar") and not method_name.startswith("_") and
             method_name != "check_platform" and method_name != "Result_read" and
             not expected_struct in unitary_enums and
             ((not method_name.startswith("C2Tuple_") and not method_name.startswith("C3Tuple_"))
@@ -1096,6 +1102,36 @@ with open(sys.argv[1]) as in_h, open(f"{sys.argv[2]}/bindings{consts.file_ext}",
         write_c("\treturn thing->value;")
         write_c("}")
         map_fn(fn_line + "\n", re.compile("(.*) (TxOut_get_value) \((.*)\)").match(fn_line), None, None, None)
+    elif struct_name == "LDKTxIn":
+        with open(f"{sys.argv[3]}/structs/TxIn{consts.file_ext}", "w") as out_java_struct:
+            out_java_struct.write(consts.hu_struct_file_prefix)
+            out_java_struct.write(consts.txin_defn)
+            out_java_struct.write(consts.hu_struct_file_suffix)
+        fn_line = "struct LDKWitness TxIn_get_witness (struct LDKTxIn* thing)"
+        write_c(fn_line + " {")
+        write_c("\treturn Witness_clone(&thing->witness);")
+        write_c("}")
+        map_fn(fn_line + "\n", re.compile("(.*) (TxIn_get_witness) \((.*)\)").match(fn_line), None, None, None)
+        fn_line = "struct LDKCVec_u8Z TxIn_get_script_sig (struct LDKTxIn* thing)"
+        write_c(fn_line + " {")
+        write_c("\treturn CVec_u8Z_clone(&thing->script_sig);")
+        write_c("}")
+        map_fn(fn_line + "\n", re.compile("(.*) (TxIn_get_script_sig) \((.*)\)").match(fn_line), None, None, None)
+        fn_line = "LDKThirtyTwoBytes TxIn_get_previous_txid (struct LDKTxIn* thing)"
+        write_c(fn_line + " {")
+        write_c("\treturn thing->previous_txid;")
+        write_c("}")
+        map_fn(fn_line + "\n", re.compile("(.*) (TxIn_get_previous_txid) \((.*)\)").match(fn_line), None, None, None)
+        fn_line = "uint32_t TxIn_get_previous_vout (struct LDKTxIn* thing)"
+        write_c(fn_line + " {")
+        write_c("\treturn thing->previous_vout;")
+        write_c("}")
+        map_fn(fn_line + "\n", re.compile("(.*) (TxIn_get_previous_vout) \((.*)\)").match(fn_line), None, None, None)
+        fn_line = "uint32_t TxIn_get_sequence (struct LDKTxIn* thing)"
+        write_c(fn_line + " {")
+        write_c("\treturn thing->sequence;")
+        write_c("}")
+        map_fn(fn_line + "\n", re.compile("(.*) (TxIn_get_sequence) \((.*)\)").match(fn_line), None, None, None)
     elif struct_name == "LDKBigEndianScalar":
         with open(f"{sys.argv[3]}/structs/BigEndianScalar{consts.file_ext}", "w") as out_java_struct:
             out_java_struct.write(consts.hu_struct_file_prefix)
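
Because LDKWitness is surfaced to bindings users as a plain byte array, the genbindings.py change above can simply skip emitting a Witness_clone wrapper: duplicating a witness on the Java side is just an array copy. A minimal sketch of that idea from a caller's perspective (the class and variable names below are illustrative only, not part of this commit):

    import java.util.Arrays;

    public class WitnessCopyExample {
        public static void main(String[] args) {
            // A serialized witness, as the generated bindings expose it: just bytes.
            byte[] witness = new byte[] { 0x01, 0x02, 0x03 };
            // No dedicated clone binding is needed; copying the array is enough.
            byte[] witnessCopy = Arrays.copyOf(witness, witness.length);
            System.out.println("copied " + witnessCopy.length + " witness bytes");
        }
    }
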
diff --git a/java_strings.py b/java_strings.py
index f5df004d..d903cddf 100644
--- a/java_strings.py
+++ b/java_strings.py
@@ -130,6 +130,38 @@ class CommonBase {
 	}
 }"""

+        self.txin_defn = """public class TxIn extends CommonBase {
+	/** The witness in this input, in serialized form */
+	public final byte[] witness;
+	/** The script_sig in this input */
+	public final byte[] script_sig;
+	/** The transaction output's sequence number */
+	public final int sequence;
+	/** The txid this input is spending */
+	public final byte[] previous_txid;
+	/** The output index within the spent transaction of the output this input is spending */
+	public final int previous_vout;
+
+	TxIn(java.lang.Object _dummy, long ptr) {
+		super(ptr);
+		this.witness = bindings.TxIn_get_witness(ptr);
+		this.script_sig = bindings.TxIn_get_script_sig(ptr);
+		this.sequence = bindings.TxIn_get_sequence(ptr);
+		this.previous_txid = bindings.TxIn_get_previous_txid(ptr);
+		this.previous_vout = bindings.TxIn_get_previous_vout(ptr);
+	}
+	/** Constructs a new TxIn, note that previous_txid must be exactly 32 bytes */
+	public TxIn(byte[] witness, byte[] script_sig, int sequence, byte[] previous_txid, int previous_vout) {
+		this(null, bindings.TxIn_new(witness, script_sig, sequence, previous_txid, previous_vout));
+	}
+
+	@Override @SuppressWarnings(\"deprecation\")
+	protected void finalize() throws Throwable {
+		super.finalize();
+		if (ptr != 0) { bindings.TxIn_free(ptr); }
+	}
+}"""
+
         self.scalar_defn = """public class BigEndianScalar extends CommonBase {
 	/** The bytes of the scalar value, in big endian */
 	public final byte[] scalar_bytes;
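
The TxIn class added to java_strings.py above can be built either from an existing native pointer (the package-private _dummy constructor) or directly from field values via bindings.TxIn_new. A minimal usage sketch, assuming the generated class lives in org.ldk.structs like the other manually-defined structs and that the native library has already been loaded; the concrete field values are placeholders, not taken from this commit:

    import org.ldk.structs.TxIn;

    public class TxInExample {
        public static void main(String[] args) {
            byte[] witness = new byte[0];        // empty serialized witness
            byte[] scriptSig = new byte[0];      // empty script_sig
            byte[] previousTxid = new byte[32];  // must be exactly 32 bytes
            TxIn input = new TxIn(witness, scriptSig, 0xffffffff, previousTxid, 0);
            System.out.println("spends vout " + input.previous_vout + " with sequence " + input.sequence);
        }
    }

The package-private constructor reads each field back from the native object through the TxIn_get_* bindings, and the finalizer releases the native allocation via TxIn_free, so callers do not need to free the input themselves.
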
diff --git a/typescript_strings.py b/typescript_strings.py
index ebaf72a1..54b50742 100644
--- a/typescript_strings.py
+++ b/typescript_strings.py
@@ -445,6 +445,33 @@ export class UnqualifiedError {
 }"""
         self.obj_defined(["TxOut"], "structs")

+        self.txin_defn = """export class TxIn extends CommonBase {
+	/** The witness in this input, in serialized form */
+	public witness: Uint8Array;
+	/** The script_sig in this input */
+	public script_sig: Uint8Array;
+	/** The transaction output's sequence number */
+	public sequence: number;
+	/** The txid this input is spending */
+	public previous_txid: Uint8Array;
+	/** The output index within the spent transaction of the output this input is spending */
+	public previous_vout: number;
+
+	/* @internal */
+	public constructor(_dummy: null, ptr: bigint) {
+		super(ptr, bindings.TxIn_free);
+		this.witness = bindings.decodeUint8Array(bindings.TxIn_get_witness(ptr));
+		this.script_sig = bindings.decodeUint8Array(bindings.TxIn_get_script_sig(ptr));
+		this.sequence = bindings.TxIn_get_sequence(ptr);
+		this.previous_txid = bindings.decodeUint8Array(bindings.TxIn_get_previous_txid(ptr));
+		this.previous_vout = bindings.TxIn_get_previous_vout(ptr);
+	}
+	public static constructor_new(witness: Uint8Array, script_sig: Uint8Array, sequence: number, previous_txid: Uint8Array, previous_vout: number): TxIn {
+		return new TxIn(null, bindings.TxIn_new(bindings.encodeUint8Array(witness), bindings.encodeUint8Array(script_sig), sequence, bindings.encodeUint8Array(previous_txid), previous_vout));
+	}
+}"""
+        self.obj_defined(["TxIn"], "structs")
+
         self.scalar_defn = """export class BigEndianScalar extends CommonBase {
 	/** The bytes of the scalar value, in big endian */
 	public scalar_bytes: Uint8Array;