/*
 * SPDX-License-Identifier: MIT
 */

#include "debounce.h"

#include <device.h>
#include <devicetree.h>
#include <drivers/gpio.h>
#include <drivers/kscan.h>
#include <kernel.h>
#include <logging/log.h>
#include <sys/util.h>

LOG_MODULE_DECLARE(zmk, CONFIG_ZMK_LOG_LEVEL);

#define DT_DRV_COMPAT zmk_kscan_gpio_direct

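/*
 * Direct-wire kscan driver: each GPIO in input-gpios maps to one key, so key
 * events are reported with row 0 and the column equal to the input's index.
 * Pin state is filtered through the shared debouncer in debounce.h. Depending
 * on CONFIG_ZMK_KSCAN_DIRECT_POLLING, the driver either waits for a level
 * interrupt on any input or polls all inputs on a fixed period. When the
 * CONFIG_ZMK_KSCAN_DEBOUNCE_*_MS options are non-negative, they override the
 * per-instance devicetree debounce properties used below.
 */
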
#if CONFIG_ZMK_KSCAN_DEBOUNCE_PRESS_MS >= 0
#define INST_DEBOUNCE_PRESS_MS(n) CONFIG_ZMK_KSCAN_DEBOUNCE_PRESS_MS
#else
#define INST_DEBOUNCE_PRESS_MS(n)                                                                  \
    DT_INST_PROP_OR(n, debounce_period, DT_INST_PROP(n, debounce_press_ms))
#endif

#if CONFIG_ZMK_KSCAN_DEBOUNCE_RELEASE_MS >= 0
#define INST_DEBOUNCE_RELEASE_MS(n) CONFIG_ZMK_KSCAN_DEBOUNCE_RELEASE_MS
#else
#define INST_DEBOUNCE_RELEASE_MS(n)                                                                \
    DT_INST_PROP_OR(n, debounce_period, DT_INST_PROP(n, debounce_release_ms))
#endif

#define USE_POLLING IS_ENABLED(CONFIG_ZMK_KSCAN_DIRECT_POLLING)
#define USE_INTERRUPTS (!USE_POLLING)

#define COND_INTERRUPTS(code) COND_CODE_1(CONFIG_ZMK_KSCAN_DIRECT_POLLING, (), code)
#define COND_POLL_OR_INTERRUPTS(pollcode, intcode)                                                 \
    COND_CODE_1(CONFIG_ZMK_KSCAN_DIRECT_POLLING, pollcode, intcode)

#define INST_INPUTS_LEN(n) DT_INST_PROP_LEN(n, input_gpios)
#define KSCAN_DIRECT_INPUT_CFG_INIT(idx, inst_idx)                                                 \
    GPIO_DT_SPEC_GET_BY_IDX(DT_DRV_INST(inst_idx), input_gpios, idx),

struct kscan_direct_irq_callback {
    const struct device *dev;
    struct gpio_callback callback;
};

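/*
 * Per-instance driver state. The irqs and pin_state arrays are defined by the
 * KSCAN_DIRECT_INIT macro at the bottom of this file, one entry per GPIO in
 * config->inputs.
 */
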
struct kscan_direct_data {
    const struct device *dev;
    kscan_callback_t callback;
    struct k_work_delayable work;
#if USE_INTERRUPTS
    /** Array of length config->inputs.len */
    struct kscan_direct_irq_callback *irqs;
#endif
    /** Timestamp of the current or scheduled scan. */
    int64_t scan_time;
    /** Current state of the inputs as an array of length config->inputs.len */
    struct debounce_state *pin_state;
};

struct kscan_gpio_list {
    const struct gpio_dt_spec *gpios;
    size_t len;
};

/** Define a kscan_gpio_list from a compile-time GPIO array. */
#define KSCAN_GPIO_LIST(gpio_array)                                                                \
    ((struct kscan_gpio_list){.gpios = gpio_array, .len = ARRAY_SIZE(gpio_array)})

struct kscan_direct_config {
    struct kscan_gpio_list inputs;
    struct debounce_config debounce_config;
    int32_t debounce_scan_period_ms;
    int32_t poll_period_ms;
};

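/*
 * In interrupt mode, every input pin is configured for a level interrupt while
 * idle. Interrupts are disabled for the duration of a scan (see the IRQ
 * callback below) and re-enabled once all keys have settled and been released.
 */
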
#if USE_INTERRUPTS
static int kscan_direct_interrupt_configure(const struct device *dev, const gpio_flags_t flags) {
    const struct kscan_direct_config *config = dev->config;

    for (int i = 0; i < config->inputs.len; i++) {
        const struct gpio_dt_spec *gpio = &config->inputs.gpios[i];

        int err = gpio_pin_interrupt_configure_dt(gpio, flags);
        if (err) {
            LOG_ERR("Unable to configure interrupt for pin %u on %s", gpio->pin, gpio->port->name);
            return err;
        }
    }

    return 0;
}
#endif

#if USE_INTERRUPTS
static int kscan_direct_interrupt_enable(const struct device *dev) {
    return kscan_direct_interrupt_configure(dev, GPIO_INT_LEVEL_ACTIVE);
}
#endif

#if USE_INTERRUPTS
static int kscan_direct_interrupt_disable(const struct device *dev) {
    return kscan_direct_interrupt_configure(dev, GPIO_INT_DISABLE);
}
#endif

#if USE_INTERRUPTS
static void kscan_direct_irq_callback_handler(const struct device *port, struct gpio_callback *cb,
                                              const gpio_port_pins_t pin) {
    struct kscan_direct_irq_callback *irq_data =
        CONTAINER_OF(cb, struct kscan_direct_irq_callback, callback);
    struct kscan_direct_data *data = irq_data->dev->data;

    // Disable our interrupts temporarily to avoid re-entry while we scan.
    kscan_direct_interrupt_disable(data->dev);

    data->scan_time = k_uptime_get();

    k_work_reschedule(&data->work, K_NO_WAIT);
}
#endif

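/*
 * Scans are scheduled at absolute times derived from scan_time (see the
 * K_TIMEOUT_ABS_MS calls below), so the debounce sampling period does not
 * drift with work queue latency.
 */
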
static void kscan_direct_read_continue(const struct device *dev) {
    const struct kscan_direct_config *config = dev->config;
    struct kscan_direct_data *data = dev->data;

    data->scan_time += config->debounce_scan_period_ms;

    k_work_reschedule(&data->work, K_TIMEOUT_ABS_MS(data->scan_time));
}

static void kscan_direct_read_end(const struct device *dev) {
#if USE_INTERRUPTS
    // Return to waiting for an interrupt.
    kscan_direct_interrupt_enable(dev);
#else
    struct kscan_direct_data *data = dev->data;
    const struct kscan_direct_config *config = dev->config;

    data->scan_time += config->poll_period_ms;

    // Return to polling slowly.
    k_work_reschedule(&data->work, K_TIMEOUT_ABS_MS(data->scan_time));
#endif
}

static int kscan_direct_read(const struct device *dev) {
    struct kscan_direct_data *data = dev->data;
    const struct kscan_direct_config *config = dev->config;

    // Read the inputs.
    for (int i = 0; i < config->inputs.len; i++) {
        const struct gpio_dt_spec *gpio = &config->inputs.gpios[i];

        const bool active = gpio_pin_get_dt(gpio);

        debounce_update(&data->pin_state[i], active, config->debounce_scan_period_ms,
                        &config->debounce_config);
    }

    // Process the new state and send key events.
    bool continue_scan = false;

    for (int i = 0; i < config->inputs.len; i++) {
        struct debounce_state *state = &data->pin_state[i];

        if (debounce_get_changed(state)) {
            const bool pressed = debounce_is_pressed(state);

            LOG_DBG("Sending event at 0,%i state %s", i, pressed ? "on" : "off");
            data->callback(dev, 0, i, pressed);
        }

        continue_scan = continue_scan || debounce_is_active(state);
    }

    if (continue_scan) {
        // At least one key is pressed or the debouncer has not yet decided if
        // it is pressed. Poll quickly until everything is released.
        kscan_direct_read_continue(dev);
    } else {
        // All keys are released. Return to normal.
        kscan_direct_read_end(dev);
    }

    return 0;
}

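/*
 * All scanning funnels through this single delayable work item: GPIO
 * interrupts, poll-mode rescheduling, and follow-up debounce scans all
 * reschedule it rather than reading pins in interrupt context.
 */
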
static void kscan_direct_work_handler(struct k_work *work) {
    struct k_work_delayable *dwork = CONTAINER_OF(work, struct k_work_delayable, work);
    struct kscan_direct_data *data = CONTAINER_OF(dwork, struct kscan_direct_data, work);
    kscan_direct_read(data->dev);
}

static int kscan_direct_configure(const struct device *dev, kscan_callback_t callback) {
    struct kscan_direct_data *data = dev->data;

    if (!callback) {
        return -EINVAL;
    }

    data->callback = callback;
    return 0;
}

static int kscan_direct_enable(const struct device *dev) {
    struct kscan_direct_data *data = dev->data;

    data->scan_time = k_uptime_get();

    // Read will automatically start interrupts/polling once done.
    return kscan_direct_read(dev);
}

static int kscan_direct_disable(const struct device *dev) {
    struct kscan_direct_data *data = dev->data;

    k_work_cancel_delayable(&data->work);

#if USE_INTERRUPTS
    return kscan_direct_interrupt_disable(dev);
#else
    return 0;
#endif
}

static int kscan_direct_init_input_inst(const struct device *dev, const struct gpio_dt_spec *gpio,
                                        const int index) {
    if (!device_is_ready(gpio->port)) {
        LOG_ERR("GPIO is not ready: %s", gpio->port->name);
        return -ENODEV;
    }

    int err = gpio_pin_configure_dt(gpio, GPIO_INPUT);
    if (err) {
        LOG_ERR("Unable to configure pin %u on %s for input", gpio->pin, gpio->port->name);
        return err;
    }

    LOG_DBG("Configured pin %u on %s for input", gpio->pin, gpio->port->name);

#if USE_INTERRUPTS
    struct kscan_direct_data *data = dev->data;
    struct kscan_direct_irq_callback *irq = &data->irqs[index];

    irq->dev = dev;
    gpio_init_callback(&irq->callback, kscan_direct_irq_callback_handler, BIT(gpio->pin));
    err = gpio_add_callback(gpio->port, &irq->callback);
    if (err) {
        LOG_ERR("Error adding the callback to the input device: %i", err);
        return err;
    }
#endif

    return 0;
}

static int kscan_direct_init_inputs(const struct device *dev) {
    const struct kscan_direct_config *config = dev->config;

    for (int i = 0; i < config->inputs.len; i++) {
        const struct gpio_dt_spec *gpio = &config->inputs.gpios[i];
        int err = kscan_direct_init_input_inst(dev, gpio, i);
        if (err) {
            return err;
        }
    }

    return 0;
}

static int kscan_direct_init(const struct device *dev) {
    struct kscan_direct_data *data = dev->data;

    data->dev = dev;

    kscan_direct_init_inputs(dev);

    k_work_init_delayable(&data->work, kscan_direct_work_handler);

    return 0;
}

static const struct kscan_driver_api kscan_direct_api = {
    .config = kscan_direct_configure,
    .enable_callback = kscan_direct_enable,
    .disable_callback = kscan_direct_disable,
};

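/*
 * Instance definitions. For illustration only, a hypothetical devicetree node
 * matching this driver (actual GPIOs, flags, and timings are board-specific):
 *
 *     kscan0: kscan {
 *         compatible = "zmk,kscan-gpio-direct";
 *         input-gpios = <&gpio0 2 (GPIO_ACTIVE_LOW | GPIO_PULL_UP)>,
 *                       <&gpio0 3 (GPIO_ACTIVE_LOW | GPIO_PULL_UP)>;
 *         debounce-press-ms = <1>;
 *         debounce-release-ms = <5>;
 *         debounce-scan-period-ms = <1>;
 *         poll-period-ms = <10>;
 *     };
 */
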
#define KSCAN_DIRECT_INIT(n)                                                                       \
    BUILD_ASSERT(INST_DEBOUNCE_PRESS_MS(n) <= DEBOUNCE_COUNTER_MAX,                               \
                 "ZMK_KSCAN_DEBOUNCE_PRESS_MS or debounce-press-ms is too large");                \
    BUILD_ASSERT(INST_DEBOUNCE_RELEASE_MS(n) <= DEBOUNCE_COUNTER_MAX,                             \
                 "ZMK_KSCAN_DEBOUNCE_RELEASE_MS or debounce-release-ms is too large");            \
                                                                                                   \
    static const struct gpio_dt_spec kscan_direct_inputs_##n[] = {                                \
        UTIL_LISTIFY(INST_INPUTS_LEN(n), KSCAN_DIRECT_INPUT_CFG_INIT, n)};                        \
                                                                                                   \
    static struct debounce_state kscan_direct_state_##n[INST_INPUTS_LEN(n)];                      \
                                                                                                   \
    COND_INTERRUPTS(                                                                               \
        (static struct kscan_direct_irq_callback kscan_direct_irqs_##n[INST_INPUTS_LEN(n)];))     \
                                                                                                   \
    static struct kscan_direct_data kscan_direct_data_##n = {                                     \
        .pin_state = kscan_direct_state_##n, COND_INTERRUPTS((.irqs = kscan_direct_irqs_##n, ))}; \
                                                                                                   \
    static struct kscan_direct_config kscan_direct_config_##n = {                                 \
        .inputs = KSCAN_GPIO_LIST(kscan_direct_inputs_##n),                                       \
        .debounce_config =                                                                         \
            {                                                                                      \
                .debounce_press_ms = INST_DEBOUNCE_PRESS_MS(n),                                   \
                .debounce_release_ms = INST_DEBOUNCE_RELEASE_MS(n),                               \
            },                                                                                     \
        .debounce_scan_period_ms = DT_INST_PROP(n, debounce_scan_period_ms),                      \
        .poll_period_ms = DT_INST_PROP(n, poll_period_ms),                                        \
    };                                                                                             \
                                                                                                   \
    DEVICE_DT_INST_DEFINE(n, &kscan_direct_init, NULL, &kscan_direct_data_##n,                    \
                          &kscan_direct_config_##n, APPLICATION, CONFIG_APPLICATION_INIT_PRIORITY, \
                          &kscan_direct_api);

DT_INST_FOREACH_STATUS_OKAY(KSCAN_DIRECT_INIT);