diff --git a/py/emitnative.c b/py/emitnative.c
index 4966e37e301b4fe6fd37ad347eda6cdecbe59b37..87329808e9c89e8a26e43662e0ee0c7005eeada6 100644
--- a/py/emitnative.c
+++ b/py/emitnative.c
@@ -530,56 +530,6 @@ STATIC void emit_post_push_reg_reg_reg_reg(emit_t *emit, vtype_kind_t vtypea, in
     emit_post_push_reg(emit, vtyped, regd);
 }
 
-// vtype of all n_pop objects is VTYPE_PYOBJ
-// does not use any temporary registers (but may use reg_dest before loading it with stack pointer)
-// TODO this needs some thinking for viper code
-STATIC void emit_get_stack_pointer_to_reg_for_pop(emit_t *emit, int reg_dest, int n_pop) {
-    need_reg_all(emit);
-    for (int i = 0; i < n_pop; i++) {
-        stack_info_t *si = &emit->stack_info[emit->stack_size - 1 - i];
-        // must push any imm's to stack
-        // must convert them to VTYPE_PYOBJ for viper code
-        if (si->kind == STACK_IMM) {
-            si->kind = STACK_VALUE;
-            switch (si->vtype) {
-                case VTYPE_PYOBJ:
-                    ASM_MOV_IMM_TO_LOCAL_USING(si->u_imm, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
-                    break;
-                case VTYPE_BOOL:
-                    si->vtype = VTYPE_PYOBJ;
-                    if (si->u_imm == 0) {
-                        ASM_MOV_IMM_TO_LOCAL_USING((mp_uint_t)mp_const_false, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
-                    } else {
-                        ASM_MOV_IMM_TO_LOCAL_USING((mp_uint_t)mp_const_true, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
-                    }
-                    break;
-                case VTYPE_INT:
-                    si->vtype = VTYPE_PYOBJ;
-                    ASM_MOV_IMM_TO_LOCAL_USING((si->u_imm << 1) | 1, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
-                    break;
-                default:
-                    // not handled
-                    assert(0);
-            }
-        }
-        assert(si->kind == STACK_VALUE);
-        assert(si->vtype == VTYPE_PYOBJ);
-    }
-    adjust_stack(emit, -n_pop);
-    ASM_MOV_LOCAL_ADDR_TO_REG(emit->stack_start + emit->stack_size, reg_dest);
-}
-
-// vtype of all n_push objects is VTYPE_PYOBJ
-STATIC void emit_get_stack_pointer_to_reg_for_push(emit_t *emit, int reg_dest, int n_push) {
-    need_reg_all(emit);
-    for (int i = 0; i < n_push; i++) {
-        emit->stack_info[emit->stack_size + i].kind = STACK_VALUE;
-        emit->stack_info[emit->stack_size + i].vtype = VTYPE_PYOBJ;
-    }
-    ASM_MOV_LOCAL_ADDR_TO_REG(emit->stack_start + emit->stack_size, reg_dest);
-    adjust_stack(emit, n_push);
-}
-
 STATIC void emit_call(emit_t *emit, mp_fun_kind_t fun_kind, void *fun) {
     need_reg_all(emit);
 #if N_X64
@@ -634,26 +584,84 @@ STATIC void emit_call_with_3_imm_args_and_first_aligned(emit_t *emit, mp_fun_kin
 #endif
 }
 
-STATIC void emit_native_load_id(emit_t *emit, qstr qstr) {
-    // check for built-ins
-    if (strcmp(qstr_str(qstr), "v_int") == 0) {
-        assert(0);
-        emit_native_pre(emit);
-        //emit_post_push_blank(emit, VTYPE_BUILTIN_V_INT);
+// vtype of all n_pop objects is VTYPE_PYOBJ
+// Will convert any items that are not VTYPE_PYOBJ to this type and put them back on the stack.
+// If any conversions of non-immediate values are needed, then it uses REG_ARG_1, REG_ARG_2 and REG_RET.
+// Otherwise, it does not use any temporary registers (but may use reg_dest before loading it with stack pointer).
+STATIC void emit_get_stack_pointer_to_reg_for_pop(emit_t *emit, mp_uint_t reg_dest, mp_uint_t n_pop) {
+    need_reg_all(emit);
 
-    // not a built-in, so do usual thing
-    } else {
-        emit_common_load_id(emit, &EXPORT_FUN(method_table), emit->scope, qstr);
+    // First, store any immediate values in their respective places on the stack.
+    for (mp_uint_t i = 0; i < n_pop; i++) {
+        stack_info_t *si = &emit->stack_info[emit->stack_size - 1 - i];
+        // must push any imm's to stack
+        // must convert them to VTYPE_PYOBJ for viper code
+        if (si->kind == STACK_IMM) {
+            si->kind = STACK_VALUE;
+            switch (si->vtype) {
+                case VTYPE_PYOBJ:
+                    ASM_MOV_IMM_TO_LOCAL_USING(si->u_imm, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
+                    break;
+                case VTYPE_BOOL:
+                    if (si->u_imm == 0) {
+                        ASM_MOV_IMM_TO_LOCAL_USING((mp_uint_t)mp_const_false, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
+                    } else {
+                        ASM_MOV_IMM_TO_LOCAL_USING((mp_uint_t)mp_const_true, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
+                    }
+                    si->vtype = VTYPE_PYOBJ;
+                    break;
+                case VTYPE_INT:
+                case VTYPE_UINT:
+                    ASM_MOV_IMM_TO_LOCAL_USING((si->u_imm << 1) | 1, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
+                    si->vtype = VTYPE_PYOBJ;
+                    break;
+                default:
+                    // not handled
+                    assert(0);
+            }
+        }
+
+        // verify that this value is on the stack
+        assert(si->kind == STACK_VALUE);
+    }
+
+    // Second, convert any values that are not VTYPE_PYOBJ to that type.
+    for (mp_uint_t i = 0; i < n_pop; i++) {
+        stack_info_t *si = &emit->stack_info[emit->stack_size - 1 - i];
+        if (si->vtype != VTYPE_PYOBJ) {
+            mp_uint_t local_num = emit->stack_start + emit->stack_size - 1 - i;
+            ASM_MOV_LOCAL_TO_REG(local_num, REG_ARG_1);
+            emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, mp_convert_native_to_obj, si->vtype, REG_ARG_2); // arg2 = type
+            ASM_MOV_REG_TO_LOCAL(REG_RET, local_num);
+            si->vtype = VTYPE_PYOBJ;
+        }
+    }
+
+    // Adjust the stack for a pop of n_pop items, and load the stack pointer into reg_dest.
+    adjust_stack(emit, -n_pop);
+    ASM_MOV_LOCAL_ADDR_TO_REG(emit->stack_start + emit->stack_size, reg_dest);
+}
+
+// vtype of all n_push objects is VTYPE_PYOBJ
+STATIC void emit_get_stack_pointer_to_reg_for_push(emit_t *emit, mp_uint_t reg_dest, mp_uint_t n_push) {
+    need_reg_all(emit);
+    for (mp_uint_t i = 0; i < n_push; i++) {
+        emit->stack_info[emit->stack_size + i].kind = STACK_VALUE;
+        emit->stack_info[emit->stack_size + i].vtype = VTYPE_PYOBJ;
     }
+    ASM_MOV_LOCAL_ADDR_TO_REG(emit->stack_start + emit->stack_size, reg_dest);
+    adjust_stack(emit, n_push);
+}
+
+STATIC void emit_native_load_id(emit_t *emit, qstr qstr) {
+    emit_common_load_id(emit, &EXPORT_FUN(method_table), emit->scope, qstr);
 }
 
 STATIC void emit_native_store_id(emit_t *emit, qstr qstr) {
-    // TODO check for built-ins and disallow
     emit_common_store_id(emit, &EXPORT_FUN(method_table), emit->scope, qstr);
 }
 
 STATIC void emit_native_delete_id(emit_t *emit, qstr qstr) {
-    // TODO check for built-ins and disallow
     emit_common_delete_id(emit, &EXPORT_FUN(method_table), emit->scope, qstr);
 }
 
@@ -1396,7 +1404,7 @@ STATIC void emit_native_call_function(emit_t *emit, int n_positional, int n_keyw
     vtype_kind_t vtype_fun;
     emit_pre_pop_reg(emit, &vtype_fun, REG_ARG_1); // the function
     assert(vtype_fun == VTYPE_PYOBJ);
-    emit_call_with_imm_arg(emit, MP_F_CALL_FUNCTION_N_KW_FOR_NATIVE, mp_call_function_n_kw_for_native, n_positional | (n_keyword << 8), REG_ARG_2);
+    emit_call_with_imm_arg(emit, MP_F_NATIVE_CALL_FUNCTION_N_KW, mp_native_call_function_n_kw, n_positional | (n_keyword << 8), REG_ARG_2);
     emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
 }
 
@@ -1453,13 +1461,13 @@ STATIC void emit_native_return_value(emit_t *emit) {
 
 STATIC void emit_native_raise_varargs(emit_t *emit, int n_args) {
     assert(n_args == 1);
-    vtype_kind_t vtype_err;
-    emit_pre_pop_reg(emit, &vtype_err, REG_ARG_1); // arg1 = object to raise
-    assert(vtype_err == VTYPE_PYOBJ);
-    emit_call(emit, 0, mp_make_raise_obj); // TODO need to add function to runtime table
-    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
-    emit_pre_pop_reg(emit, &vtype_err, REG_ARG_1);
-    emit_call(emit, 0, nlr_jump); // TODO need to add function to runtime table
+    vtype_kind_t vtype_exc;
+    emit_pre_pop_reg(emit, &vtype_exc, REG_ARG_1); // arg1 = object to raise
+    if (vtype_exc != VTYPE_PYOBJ) {
+        printf("ViperTypeError: must raise an object\n");
+    }
+    // TODO probably make this 1 call to the runtime (which could even call convert, native_raise(obj, type))
+    emit_call(emit, MP_F_NATIVE_RAISE, mp_native_raise);
 }
 
 STATIC void emit_native_yield_value(emit_t *emit) {
diff --git a/py/runtime.c b/py/runtime.c
index f233a772c0760a4eb0fee591f85ab14cd8af9a82..caaf795ef9c94837f1cf391607b9fff6bbbc5b61 100644
--- a/py/runtime.c
+++ b/py/runtime.c
@@ -517,12 +517,6 @@ mp_obj_t mp_call_function_2(mp_obj_t fun, mp_obj_t arg1, mp_obj_t arg2) {
     return mp_call_function_n_kw(fun, 2, 0, args);
 }
 
-// wrapper that accepts n_args and n_kw in one argument
-// native emitter can only pass at most 3 arguments to a function
-mp_obj_t mp_call_function_n_kw_for_native(mp_obj_t fun_in, uint n_args_kw, const mp_obj_t *args) {
-    return mp_call_function_n_kw(fun_in, n_args_kw & 0xff, (n_args_kw >> 8) & 0xff, args);
-}
-
 // args contains, eg: arg0  arg1  key0  value0  key1  value1
 mp_obj_t mp_call_function_n_kw(mp_obj_t fun_in, uint n_args, uint n_kw, const mp_obj_t *args) {
     // TODO improve this: fun object can specify its type and we parse here the arguments,
@@ -1187,6 +1181,17 @@ mp_obj_t mp_convert_native_to_obj(mp_uint_t val, mp_uint_t type) {
     }
 }
 
+// wrapper that accepts n_args and n_kw in one argument
+// (native emitter can pass at most 3 arguments to a function)
+mp_obj_t mp_native_call_function_n_kw(mp_obj_t fun_in, uint n_args_kw, const mp_obj_t *args) {
+    return mp_call_function_n_kw(fun_in, n_args_kw & 0xff, (n_args_kw >> 8) & 0xff, args);
+}
+
+// wrapper that makes raise obj and raises it
+NORETURN void mp_native_raise(mp_obj_t o) {
+    nlr_raise(mp_make_raise_obj(o));
+}
+
 // these must correspond to the respective enum
 void *const mp_fun_table[MP_F_NUMBER_OF] = {
     mp_convert_obj_to_native,
@@ -1216,10 +1221,11 @@ void *const mp_fun_table[MP_F_NUMBER_OF] = {
     mp_obj_set_store,
 #endif
     mp_make_function_from_raw_code,
-    mp_call_function_n_kw_for_native,
+    mp_native_call_function_n_kw,
     mp_call_method_n_kw,
     mp_getiter,
     mp_iternext,
+    mp_native_raise,
     mp_import_name,
     mp_import_from,
     mp_import_all,
diff --git a/py/runtime.h b/py/runtime.h
index f908d9d26d43953706815b99f7ff6851f87c0bd1..c46087d1452d0aac0dbbf6c1bd2a2c5a316c0099 100644
--- a/py/runtime.h
+++ b/py/runtime.h
@@ -87,7 +87,6 @@ mp_obj_t mp_make_function_var_between(int n_args_min, int n_args_max, mp_fun_var
 mp_obj_t mp_call_function_0(mp_obj_t fun);
 mp_obj_t mp_call_function_1(mp_obj_t fun, mp_obj_t arg);
 mp_obj_t mp_call_function_2(mp_obj_t fun, mp_obj_t arg1, mp_obj_t arg2);
-mp_obj_t mp_call_function_n_kw_for_native(mp_obj_t fun_in, uint n_args_kw, const mp_obj_t *args);
 mp_obj_t mp_call_function_n_kw(mp_obj_t fun, uint n_args, uint n_kw, const mp_obj_t *args);
 mp_obj_t mp_call_method_n_kw(uint n_args, uint n_kw, const mp_obj_t *args);
 mp_obj_t mp_call_method_n_kw_var(bool have_self, uint n_args_n_kw, const mp_obj_t *args);
@@ -115,8 +114,11 @@ void mp_import_all(mp_obj_t module);
 // Raise NotImplementedError with given message
 NORETURN void mp_not_implemented(const char *msg);
 
+// helper functions for native/viper code
 mp_uint_t mp_convert_obj_to_native(mp_obj_t obj, mp_uint_t type);
 mp_obj_t mp_convert_native_to_obj(mp_uint_t val, mp_uint_t type);
+mp_obj_t mp_native_call_function_n_kw(mp_obj_t fun_in, uint n_args_kw, const mp_obj_t *args);
+NORETURN void mp_native_raise(mp_obj_t o);
 
 extern struct _mp_obj_list_t mp_sys_path_obj;
 extern struct _mp_obj_list_t mp_sys_argv_obj;
diff --git a/py/runtime0.h b/py/runtime0.h
index dfce041b8f13b6ae2efb3f71fec614f2c8e14e7e..8ae8afacc7e1541b9cc2f8dd7e072f50f9d24ba8 100644
--- a/py/runtime0.h
+++ b/py/runtime0.h
@@ -129,12 +129,13 @@ typedef enum {
     MP_F_STORE_SET,
 #endif
     MP_F_MAKE_FUNCTION_FROM_RAW_CODE,
-    MP_F_CALL_FUNCTION_N_KW_FOR_NATIVE,
+    MP_F_NATIVE_CALL_FUNCTION_N_KW,
     MP_F_CALL_METHOD_N_KW,
     MP_F_GETITER,
     MP_F_ITERNEXT,
+    MP_F_NATIVE_RAISE,
     MP_F_IMPORT_NAME,
-    MP_F_IMPORT_FROM,
+    MP_F_IMPORT_FROM, // = 31 XXX this is the limit for thumb code...
     MP_F_IMPORT_ALL,
 #if MICROPY_PY_BUILTINS_SLICE
     MP_F_NEW_SLICE,
diff --git a/tests/micropython/viper.py b/tests/micropython/viper.py
index 36849abafd32701290b6e375649f8e876cec54d0..5d4f4fd7b5f9e7669b76f241729b69fbd5c110e7 100644
--- a/tests/micropython/viper.py
+++ b/tests/micropython/viper.py
@@ -2,48 +2,80 @@ import micropython
 
 # viper function taking and returning ints
 @micropython.viper
-def f(x:int, y:int) -> int:
+def viper_int(x:int, y:int) -> int:
     return x + y + 3
+print(viper_int(1, 2))
 
 # viper function taking and returning objects
 @micropython.viper
-def g(x:object, y:object) -> object:
+def viper_object(x:object, y:object) -> object:
     return x + y
+print(viper_object(1, 2))
 
 # a local (should have automatic type int)
 @micropython.viper
-def h(x:int) -> int:
+def viper_local(x:int) -> int:
     y = 4
     return x + y
+print(viper_local(3))
 
 # without type annotation, types should default to object
 @micropython.viper
-def i(x, y):
+def viper_no_annotation(x, y):
     return x * y
+print(viper_no_annotation(4, 5))
 
 # a for loop
 @micropython.viper
-def viper_sum(a:int, b:int) -> int:
+def viper_for(a:int, b:int) -> int:
     total = 0
     for x in range(a, b):
         total += x
     return total
+print(viper_for(10, 10000))
 
 # accessing a global
 @micropython.viper
-def access_global():
+def viper_access_global():
     global gl
     gl = 1
     return gl
+print(viper_access_global(), gl)
+
+# calling print with object and int types
+@micropython.viper
+def viper_print(x, y:int):
+    print(x, y + 1)
+viper_print(1, 2)
+
+# making a tuple from an object and an int
+@micropython.viper
+def viper_tuple(x, y:int):
+    return (x, y + 1)
+print(viper_tuple(1, 2))
+
+# making a list from an object and an int
+@micropython.viper
+def viper_list(x, y:int):
+    return [x, y + 1]
+print(viper_list(1, 2))
+
+# making a set from an object and an int
+@micropython.viper
+def viper_set(x, y:int):
+    return {x, y + 1}
+print(sorted(list(viper_set(1, 2))))
+
+# raising an exception
+@micropython.viper
+def viper_raise(x:int):
+    raise SystemError(x)
+try:
+    viper_raise(1)
+except SystemError as e:
+    print(repr(e))
 
 # this doesn't work at the moment
 #@micropython.viper
 #def g() -> uint:
 #    return -1
-
-print(f(1, 2))
-print(g(1, 2))
-print(h(3))
-print(i(4, 5))
-print(viper_sum(10, 10000))
-print(access_global(), gl)
diff --git a/tests/micropython/viper.py.exp b/tests/micropython/viper.py.exp
index 4c308316b2d4a083f0632a374d83352b82d8ded1..28214aa1794c8f952315873473229620be794de1 100644
--- a/tests/micropython/viper.py.exp
+++ b/tests/micropython/viper.py.exp
@@ -4,3 +4,8 @@
 20
 49994955
 1 1
+1 3
+(1, 3)
+[1, 3]
+[1, 3]
+SystemError(1,)